#include <linux/kernel.h>
#include <asm/unaligned.h>
#include <crypto/internal/poly1305.h>

typedef __uint128_t u128;

void poly1305_core_setkey(struct poly1305_core_key *key, const u8 raw_key[16])
{
	u64 t0, t1;

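	/* clamp r: r &= 0xffffffc0ffffffc0ffffffc0fffffff */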
	t0 = get_unaligned_le64(&raw_key[0]);
	t1 = get_unaligned_le64(&raw_key[8]);

	key->key.r64[0] = t0 & 0xffc0fffffffULL;
	key->key.r64[1] = ((t0 >> 44) | (t1 << 20)) & 0xfffffc0ffffULL;
	key->key.r64[2] = ((t1 >> 24)) & 0x00ffffffc0fULL;

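	/* s = 20*r (5 * 4): scales the high limbs so that products which
	 * overflow 2^130 can be folded back in via 2^130 == 5 (mod p)
	 */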
	key->precomputed_s.r64[0] = key->key.r64[1] * 20;
	key->precomputed_s.r64[1] = key->key.r64[2] * 20;
}
EXPORT_SYMBOL(poly1305_core_setkey);

void poly1305_core_blocks(struct poly1305_state *state,
			  const struct poly1305_core_key *key, const void *src,
			  unsigned int nblocks, u32 hibit)
{
	const u8 *input = src;
	u64 hibit64;
	u64 r0, r1, r2;
	u64 s1, s2;
	u64 h0, h1, h2;
	u64 c;
	u128 d0, d1, d2, d;

	if (!nblocks)
		return;

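	/* the 2^128 pad bit sits at bit 40 of the 42-bit top limb (128 - 88) */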
	hibit64 = ((u64)hibit) << 40;

	r0 = key->key.r64[0];
	r1 = key->key.r64[1];
	r2 = key->key.r64[2];

	h0 = state->h64[0];
	h1 = state->h64[1];
	h2 = state->h64[2];

	s1 = key->precomputed_s.r64[0];
	s2 = key->precomputed_s.r64[1];

	do {
		u64 t0, t1;

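		/* h += m[i] */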
		t0 = get_unaligned_le64(&input[0]);
		t1 = get_unaligned_le64(&input[8]);

		h0 += t0 & 0xfffffffffffULL;
		h1 += ((t0 >> 44) | (t1 << 20)) & 0xfffffffffffULL;
		h2 += (((t1 >> 24)) & 0x3ffffffffffULL) | hibit64;

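		/* h *= r */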
		d0 = (u128)h0 * r0;
		d = (u128)h1 * s2;
		d0 += d;
		d = (u128)h2 * s1;
		d0 += d;
		d1 = (u128)h0 * r1;
		d = (u128)h1 * r0;
		d1 += d;
		d = (u128)h2 * s2;
		d1 += d;
		d2 = (u128)h0 * r2;
		d = (u128)h1 * r1;
		d2 += d;
		d = (u128)h2 * r0;
		d2 += d;

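		/* (partial) h %= p */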
		c = (u64)(d0 >> 44);
		h0 = (u64)d0 & 0xfffffffffffULL;
		d1 += c;
		c = (u64)(d1 >> 44);
		h1 = (u64)d1 & 0xfffffffffffULL;
		d2 += c;
		c = (u64)(d2 >> 42);
		h2 = (u64)d2 & 0x3ffffffffffULL;
		h0 += c * 5;
		c = h0 >> 44;
		h0 = h0 & 0xfffffffffffULL;
		h1 += c;

		input += POLY1305_BLOCK_SIZE;
	} while (--nblocks);

	state->h64[0] = h0;
	state->h64[1] = h1;
	state->h64[2] = h2;
}
EXPORT_SYMBOL(poly1305_core_blocks);

void poly1305_core_emit(const struct poly1305_state *state, const u32 nonce[4],
			void *dst)
{
	u8 *mac = dst;
	u64 h0, h1, h2, c;
	u64 g0, g1, g2;
	u64 t0, t1;

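	/* fully carry h */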
	h0 = state->h64[0];
	h1 = state->h64[1];
	h2 = state->h64[2];

	c = h1 >> 44;
	h1 &= 0xfffffffffffULL;
	h2 += c;
	c = h2 >> 42;
	h2 &= 0x3ffffffffffULL;
	h0 += c * 5;
	c = h0 >> 44;
	h0 &= 0xfffffffffffULL;
	h1 += c;
	c = h1 >> 44;
	h1 &= 0xfffffffffffULL;
	h2 += c;
	c = h2 >> 42;
	h2 &= 0x3ffffffffffULL;
	h0 += c * 5;
	c = h0 >> 44;
	h0 &= 0xfffffffffffULL;
	h1 += c;

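	/* compute h + -p */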
	g0 = h0 + 5;
	c = g0 >> 44;
	g0 &= 0xfffffffffffULL;
	g1 = h1 + c;
	c = g1 >> 44;
	g1 &= 0xfffffffffffULL;
	g2 = h2 + c - (1ULL << 42);

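	/* select h if h < p, or h + -p if h >= p (constant time) */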
	c = (g2 >> ((sizeof(u64) * 8) - 1)) - 1;
	g0 &= c;
	g1 &= c;
	g2 &= c;
	c = ~c;
	h0 = (h0 & c) | g0;
	h1 = (h1 & c) | g1;
	h2 = (h2 & c) | g2;

	if (likely(nonce)) {
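		/* h = (h + nonce) */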
		t0 = ((u64)nonce[1] << 32) | nonce[0];
		t1 = ((u64)nonce[3] << 32) | nonce[2];

		h0 += t0 & 0xfffffffffffULL;
		c = h0 >> 44;
		h0 &= 0xfffffffffffULL;
		h1 += (((t0 >> 44) | (t1 << 20)) & 0xfffffffffffULL) + c;
		c = h1 >> 44;
		h1 &= 0xfffffffffffULL;
		h2 += (((t1 >> 24)) & 0x3ffffffffffULL) + c;
		h2 &= 0x3ffffffffffULL;
	}

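	/* mac = (h + nonce) % (2^128) */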
	h0 = h0 | (h1 << 44);
	h1 = (h1 >> 20) | (h2 << 24);

	put_unaligned_le64(h0, &mac[0]);
	put_unaligned_le64(h1, &mac[8]);
}
EXPORT_SYMBOL(poly1305_core_emit);