/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM3 AVX accelerated transform.
 * specified in: https://datatracker.ietf.org/doc/html/draft-sct-shen-sm3-hash-function-02
 *
 * Copyright (C) 2021 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 * Copyright (C) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */

/* Based on SM3 AVX/BMI2 accelerated work by libgcrypt. */

#include <linux/linkage.h>
#include <asm/frame.h>

/* Context structure */

#define state_h0 0
#define state_h1 4
#define state_h2 8
#define state_h3 12
#define state_h4 16
#define state_h5 20
#define state_h6 24
#define state_h7 28
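
/*
 * Note: these offsets assume the eight 32-bit chaining words A-H are laid
 * out contiguously at the start of the context (struct sm3_state); the
 * 4-byte stride is the only thing the code below relies on.
 */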

/* Constants */

/* Round constant macros */

#define K0   2043430169
#define K1   -208106958
#define K2   -416213915
#define K3   -832427829
#define K4   -1664855657
#define K5   965255983
#define K6   1930511966
#define K7   -433943364
#define K8   -867886727
#define K9   -1735773453
#define K10  823420391
#define K11  1646840782
#define K12  -1001285732
#define K13  -2002571463
#define K14  289824371
#define K15  579648742
#define K16  -1651869049
#define K17  991229199
#define K18  1982458398
#define K19  -330050500
#define K20  -660100999
#define K21  -1320201997
#define K22  1654563303
#define K23  -985840690
#define K24  -1971681379
#define K25  351604539
#define K26  703209078
#define K27  1406418156
#define K28  -1482130984
#define K29  1330705329
#define K30  -1633556638
#define K31  1027854021
#define K32  2055708042
#define K33  -183551212
#define K34  -367102423
#define K35  -734204845
#define K36  -1468409689
#define K37  1358147919
#define K38  -1578671458
#define K39  1137624381
#define K40  -2019718534
#define K41  255530229
#define K42  511060458
#define K43  1022120916
#define K44  2044241832
#define K45  -206483632
#define K46  -412967263
#define K47  -825934525
#define K48  -1651869049
#define K49  991229199
#define K50  1982458398
#define K51  -330050500
#define K52  -660100999
#define K53  -1320201997
#define K54  1654563303
#define K55  -985840690
#define K56  -1971681379
#define K57  351604539
#define K58  703209078
#define K59  1406418156
#define K60  -1482130984
#define K61  1330705329
#define K62  -1633556638
#define K63  1027854021
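
/*
 * K[j] = rol32(T[j], j mod 32), with T[j] = 0x79cc4519 for rounds 0-15 and
 * 0x7a879d8a for rounds 16-63, precomputed so each round needs only one
 * leal. Since the rotation count is mod 32, K48-K63 repeat K16-K31.
 */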

/* Registers */

#define RSTATE %rdi
#define RDATA  %rsi
#define RNBLKS %rdx

#define t0 %eax
#define t1 %ebx
#define t2 %ecx

#define a %r8d
#define b %r9d
#define c %r10d
#define d %r11d
#define e %r12d
#define f %r13d
#define g %r14d
#define h %r15d

#define W0 %xmm0
#define W1 %xmm1
#define W2 %xmm2
#define W3 %xmm3
#define W4 %xmm4
#define W5 %xmm5

#define XTMP0 %xmm6
#define XTMP1 %xmm7
#define XTMP2 %xmm8
#define XTMP3 %xmm9
#define XTMP4 %xmm10
#define XTMP5 %xmm11
#define XTMP6 %xmm12

#define BSWAP_REG %xmm15

/* Stack structure */

#define STACK_W_SIZE		(32 * 2 * 3)	/* three 64-byte W1|W1W2 slots */
#define STACK_REG_SAVE_SIZE	(64)

#define STACK_W			(0)
#define STACK_REG_SAVE		(STACK_W + STACK_W_SIZE)
#define STACK_SIZE		(STACK_REG_SAVE + STACK_REG_SAVE_SIZE)

/* Instruction helpers. */

#define roll2(v, reg) \
	roll $(v), reg;

#define roll3mov(v, src, dst) \
	movl src, dst; \
	roll $(v), dst;

#define roll3(v, src, dst) \
	rorxl $(32-(v)), src, dst;

#define addl2(a, out) \
	leal (a, out), out;
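
/*
 * roll3 is a non-destructive rotate-left built from BMI2 rorxl (a right
 * rotate by 32 - v equals a left rotate by v); addl2 uses leal so the add
 * neither destroys its first operand nor modifies EFLAGS.
 */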

/* Round function macros. */

#define GG1(x, y, z, o, t) \
	movl x, o; \
	xorl y, o; \
	xorl z, o;

#define FF1(x, y, z, o, t) GG1(x, y, z, o, t)

#define GG2(x, y, z, o, t) \
	andnl z, x, o; \
	movl y, t; \
	andl x, t; \
	addl2(t, o);

#define FF2(x, y, z, o, t) \
	movl y, o; \
	xorl x, o; \
	movl y, t; \
	andl x, t; \
	andl z, o; \
	xorl t, o;
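
/*
 * FF1/GG1 (rounds 0-15) are plain XOR. FF2 is the majority function
 * MAJ(x, y, z) = (x & y) | (x & z) | (y & z), computed as
 * ((x ^ y) & z) ^ (x & y). GG2 is the choice function
 * CH(x, y, z) = (x & y) | (~x & z); andnl yields ~x & z, and the two
 * terms are bit-disjoint, so the leal-based add in addl2 equals OR.
 */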
#define R(i, a, b, c, d, e, f, g, h, round, widx, wtype) \
	/* rol(a, 12) => t0 */ \
	roll3mov(12, a, t0); \
	/* rol(t0 + e + K[round], 7) => t1 */ \
	leal K##round(t0, e, 1), t1; \
	roll2(7, t1); \
	/* h + w1 => h */ \
	addl wtype##_W1_ADDR(round, widx), h; \
	/* h + t1 => h */ \
	addl2(t1, h); \
	/* t1 ^ t0 => t0 */ \
	xorl t1, t0; \
	/* w1w2 + d => d */ \
	addl wtype##_W1W2_ADDR(round, widx), d; \
	/* FF##i(a, b, c) => t1 */ \
	FF##i(a, b, c, t1, t2); \
	/* d + t1 => d */ \
	addl2(t1, d); \
	/* GG##i(e, f, g) => t2 */ \
	GG##i(e, f, g, t2, t1); \
	/* h + t2 => h */ \
	addl2(t2, h); \
	/* rol(f, 19) => f */ \
	roll2(19, f); \
	/* d + t0 => d */ \
	addl2(t0, d); \
	/* rol(b, 9) => b */ \
	roll2(9, b); \
	/* P0(h) => h */ \
	roll3(9, h, t2); \
	roll3(17, h, t1); \
	xorl t2, h; \
	xorl t1, h;
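
/*
 * One round in the spec's terms: SS1 = rol(rol(a,12) + e + K[round], 7);
 * SS2 = SS1 ^ rol(a,12) (t0 after the xorl); TT1 = FF(a,b,c) + d + SS2 +
 * (w1 ^ w2), accumulated in d; TT2 = GG(e,f,g) + h + SS1 + w1, accumulated
 * in h and then passed through P0(x) = x ^ rol(x,9) ^ rol(x,17). The
 * roll2(9, b) / roll2(19, f) steps are the spec's C = rol(B, 9) and
 * G = rol(F, 19); the remaining variable shifts are done by rotating the
 * register assignments between R() invocations.
 */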

#define R1(a, b, c, d, e, f, g, h, round, widx, wtype) \
	R(1, a, b, c, d, e, f, g, h, round, widx, wtype)

#define R2(a, b, c, d, e, f, g, h, round, widx, wtype) \
	R(2, a, b, c, d, e, f, g, h, round, widx, wtype)

/* Input expansion macros. */

/* Byte-swapped input address. */
#define IW_W_ADDR(round, widx, offs) \
	(STACK_W + ((round) / 4) * 64 + (offs) + ((widx) * 4))(%rsp)

/* Expanded message address. */
#define XW_W_ADDR(round, widx, offs) \
	(STACK_W + ((((round) / 3) - 4) % 2) * 64 + (offs) + ((widx) * 4))(%rsp)

/* W1/W1W2 addresses for rounds 0-11 (byte-swapped input words). */
#define IW_W1_ADDR(round, widx)   IW_W_ADDR(round, widx, 0)
#define IW_W1W2_ADDR(round, widx) IW_W_ADDR(round, widx, 32)

/* W1/W1W2 addresses for rounds 12-63 (expanded message words). */
#define XW_W1_ADDR(round, widx)   XW_W_ADDR(round, widx, 0)
#define XW_W1W2_ADDR(round, widx) XW_W_ADDR(round, widx, 32)
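
/*
 * Layout note: each 64-byte stack slot holds four W1 words (offset 0)
 * followed by four W1^W2 words (offset 32). Rounds 0-11 read the three
 * slots filled at block-load time; from round 12 on, the slot index
 * (((round) / 3) - 4) % 2 cycles through a two-slot ring that the
 * scheduling macros refill several rounds in advance.
 */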

/* Input block loading. */
#define LOAD_W_XMM_1() \
	vmovdqu 0*16(RDATA), XTMP0; \
	vmovdqu 1*16(RDATA), XTMP1; \
	vmovdqu 2*16(RDATA), XTMP2; \
	vmovdqu 3*16(RDATA), XTMP3; \
	vpshufb BSWAP_REG, XTMP0, XTMP0; \
	vpshufb BSWAP_REG, XTMP1, XTMP1; \
	vpshufb BSWAP_REG, XTMP2, XTMP2; \
	vpshufb BSWAP_REG, XTMP3, XTMP3; \
	vpxor XTMP0, XTMP1, XTMP4; \
	vpxor XTMP1, XTMP2, XTMP5; \
	vpxor XTMP2, XTMP3, XTMP6; \
	leaq 64(RDATA), RDATA; \
	vmovdqa XTMP0, IW_W1_ADDR(0, 0); \
	vmovdqa XTMP4, IW_W1W2_ADDR(0, 0); \
	vmovdqa XTMP1, IW_W1_ADDR(4, 0); \
	vmovdqa XTMP5, IW_W1W2_ADDR(4, 0);

#define LOAD_W_XMM_2() \
	vmovdqa XTMP2, IW_W1_ADDR(8, 0); \
	vmovdqa XTMP6, IW_W1W2_ADDR(8, 0);

#define LOAD_W_XMM_3() \
	/* W0: w0, w0, w0, w0 (lanes, low to high) */ \
	vpshufd $0b00000000, XTMP0, W0; \
	/* W1: w1, w2, w3, w3 */ \
	vpshufd $0b11111001, XTMP0, W1; \
	/* W2: w4, w5, w6, w7 */ \
	vmovdqa XTMP1, W2; \
	/* W3: w7, w8, w9, w10 */ \
	vpalignr $12, XTMP1, XTMP2, W3; \
	/* W4: w10, w11, w12, w13 */ \
	vpalignr $8, XTMP2, XTMP3, W4; \
	/* W5: w13, w14, w15, w15 */ \
	vpshufd $0b11111001, XTMP3, W5;

/* Message scheduling. Note: 3 words per XMM register. */
#define SCHED_W_0(round, w0, w1, w2, w3, w4, w5) \
	/* Load (w[i - 16]) => XTMP0 */ \
	vpshufd $0b10111111, w0, XTMP0; \
	vpalignr $12, XTMP0, w1, XTMP0; \
	/* Load (w[i - 13]) => XTMP1 */ \
	vpshufd $0b10111111, w1, XTMP1; \
	vpalignr $12, XTMP1, w2, XTMP1; \
	/* w[i - 9] == w3 */ \
	/* w[i - 16] ^ w[i - 9] => XTMP0 */ \
	vpxor w3, XTMP0, XTMP0;

#define SCHED_W_1(round, w0, w1, w2, w3, w4, w5) \
	/* w[i - 3] == w5 */ \
	/* rol(w[i - 3], 15) ^ XTMP0 => XTMP0 */ \
	vpslld $15, w5, XTMP2; \
	vpsrld $(32-15), w5, XTMP3; \
	vpxor XTMP2, XTMP3, XTMP3; \
	vpxor XTMP3, XTMP0, XTMP0; \
	/* rol(w[i - 13], 7) => XTMP1 */ \
	vpslld $7, XTMP1, XTMP5; \
	vpsrld $(32-7), XTMP1, XTMP1; \
	vpxor XTMP5, XTMP1, XTMP1; \
	/* w[i - 6] ^ XTMP1 => XTMP1 */ \
	vpxor w4, XTMP1, XTMP1; \
	/* P1(XTMP0) ^ XTMP1 => W0, with */ \
	/* P1(x) = x ^ rol(x, 15) ^ rol(x, 23) */ \
	vpslld $15, XTMP0, XTMP5; \
	vpsrld $(32-15), XTMP0, XTMP6; \
	vpslld $23, XTMP0, XTMP2; \
	vpsrld $(32-23), XTMP0, XTMP3; \
	vpxor XTMP0, XTMP1, XTMP1; \
	vpxor XTMP6, XTMP5, XTMP5; \
	vpxor XTMP3, XTMP2, XTMP2; \
	vpxor XTMP2, XTMP5, XTMP5; \
	vpxor XTMP5, XTMP1, w0;

#define SCHED_W_2(round, w0, w1, w2, w3, w4, w5) \
	/* W1 => stack */ \
	vpshufd $0b10111111, w4, XTMP4; \
	vpalignr $12, XTMP4, w5, XTMP4; \
	vmovdqa XTMP4, XW_W1_ADDR((round), 0); \
	/* W1 ^ W2 => stack */ \
	vpxor w0, XTMP4, XTMP1; \
	vmovdqa XTMP1, XW_W1W2_ADDR((round), 0);

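/*
 * Together, SCHED_W_0/1/2 implement the SM3 expansion
 *   w[i] = P1(w[i-16] ^ w[i-9] ^ rol(w[i-3], 15)) ^ rol(w[i-13], 7) ^ w[i-6]
 * three words at a time; since w2[i] = w[i+4], each W1W2 slot caches
 * w1 ^ w2 ready for the round macro.
 */
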
.section .rodata.cst16, "aM", @progbits, 16
.align 16

/* vpshufb mask: reverse byte order within each 32-bit word. */
.Lbe32mask:
	.long 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f

.text

/*
 * Transform nblocks*64 bytes (nblocks*16 32-bit words) at DATA.
 *
 * void sm3_transform_avx(struct sm3_state *state,
 *                        const u8 *data, int nblocks);
 */
.align 16
SYM_FUNC_START(sm3_transform_avx)
	/* input:
	 *	%rdi: ctx, CTX
	 *	%rsi: data (64*nblks bytes)
	 *	%rdx: nblocks
	 */
	vzeroupper;

	pushq %rbp;
	movq %rsp, %rbp;

	movq %rdx, RNBLKS;

	subq $STACK_SIZE, %rsp;
	andq $(~63), %rsp;	/* align stack to 64 bytes */

	movq %rbx, (STACK_REG_SAVE + 0 * 8)(%rsp);
	movq %r15, (STACK_REG_SAVE + 1 * 8)(%rsp);
	movq %r14, (STACK_REG_SAVE + 2 * 8)(%rsp);
	movq %r13, (STACK_REG_SAVE + 3 * 8)(%rsp);
	movq %r12, (STACK_REG_SAVE + 4 * 8)(%rsp);

	vmovdqa .Lbe32mask(%rip), BSWAP_REG;

	/* Get the values of the chaining variables. */
	movl state_h0(RSTATE), a;
	movl state_h1(RSTATE), b;
	movl state_h2(RSTATE), c;
	movl state_h3(RSTATE), d;
	movl state_h4(RSTATE), e;
	movl state_h5(RSTATE), f;
	movl state_h6(RSTATE), g;
	movl state_h7(RSTATE), h;

.align 16
.Loop:
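	/*
	 * Per block: rounds 0-11 read byte-swapped input words directly
	 * (wtype IW); from round 12 on (wtype XW) the message expansion
	 * runs three words at a time in the vector unit, interleaved with
	 * the scalar round code so its latency is hidden.
	 */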
	/* Load and byte-swap the message block. */
	LOAD_W_XMM_1();

	leaq -1(RNBLKS), RNBLKS;

	/* Transform 0-3 + Load remaining input */
	R1(a, b, c, d, e, f, g, h, 0, 0, IW); LOAD_W_XMM_2();
	R1(d, a, b, c, h, e, f, g, 1, 1, IW);
	R1(c, d, a, b, g, h, e, f, 2, 2, IW);
	R1(b, c, d, a, f, g, h, e, 3, 3, IW); LOAD_W_XMM_3();

	/* Transform 4-7 + Precalc 12-14 */
	R1(a, b, c, d, e, f, g, h, 4, 0, IW);
	R1(d, a, b, c, h, e, f, g, 5, 1, IW);
	R1(c, d, a, b, g, h, e, f, 6, 2, IW); SCHED_W_0(12, W0, W1, W2, W3, W4, W5);
	R1(b, c, d, a, f, g, h, e, 7, 3, IW); SCHED_W_1(12, W0, W1, W2, W3, W4, W5);

	/* Transform 8-11 + Precalc 12-17 */
	R1(a, b, c, d, e, f, g, h, 8, 0, IW); SCHED_W_2(12, W0, W1, W2, W3, W4, W5);
	R1(d, a, b, c, h, e, f, g, 9, 1, IW); SCHED_W_0(15, W1, W2, W3, W4, W5, W0);
	R1(c, d, a, b, g, h, e, f, 10, 2, IW); SCHED_W_1(15, W1, W2, W3, W4, W5, W0);
	R1(b, c, d, a, f, g, h, e, 11, 3, IW); SCHED_W_2(15, W1, W2, W3, W4, W5, W0);

	/* Transform 12-14 + Precalc 18-20 */
	R1(a, b, c, d, e, f, g, h, 12, 0, XW); SCHED_W_0(18, W2, W3, W4, W5, W0, W1);
	R1(d, a, b, c, h, e, f, g, 13, 1, XW); SCHED_W_1(18, W2, W3, W4, W5, W0, W1);
	R1(c, d, a, b, g, h, e, f, 14, 2, XW); SCHED_W_2(18, W2, W3, W4, W5, W0, W1);

	/* Transform 15-17 + Precalc 21-23 */
	R1(b, c, d, a, f, g, h, e, 15, 0, XW); SCHED_W_0(21, W3, W4, W5, W0, W1, W2);
	R2(a, b, c, d, e, f, g, h, 16, 1, XW); SCHED_W_1(21, W3, W4, W5, W0, W1, W2);
	R2(d, a, b, c, h, e, f, g, 17, 2, XW); SCHED_W_2(21, W3, W4, W5, W0, W1, W2);

	/* Transform 18-20 + Precalc 24-26 */
	R2(c, d, a, b, g, h, e, f, 18, 0, XW); SCHED_W_0(24, W4, W5, W0, W1, W2, W3);
	R2(b, c, d, a, f, g, h, e, 19, 1, XW); SCHED_W_1(24, W4, W5, W0, W1, W2, W3);
	R2(a, b, c, d, e, f, g, h, 20, 2, XW); SCHED_W_2(24, W4, W5, W0, W1, W2, W3);

	/* Transform 21-23 + Precalc 27-29 */
	R2(d, a, b, c, h, e, f, g, 21, 0, XW); SCHED_W_0(27, W5, W0, W1, W2, W3, W4);
	R2(c, d, a, b, g, h, e, f, 22, 1, XW); SCHED_W_1(27, W5, W0, W1, W2, W3, W4);
	R2(b, c, d, a, f, g, h, e, 23, 2, XW); SCHED_W_2(27, W5, W0, W1, W2, W3, W4);

	/* Transform 24-26 + Precalc 30-32 */
	R2(a, b, c, d, e, f, g, h, 24, 0, XW); SCHED_W_0(30, W0, W1, W2, W3, W4, W5);
	R2(d, a, b, c, h, e, f, g, 25, 1, XW); SCHED_W_1(30, W0, W1, W2, W3, W4, W5);
	R2(c, d, a, b, g, h, e, f, 26, 2, XW); SCHED_W_2(30, W0, W1, W2, W3, W4, W5);

	/* Transform 27-29 + Precalc 33-35 */
	R2(b, c, d, a, f, g, h, e, 27, 0, XW); SCHED_W_0(33, W1, W2, W3, W4, W5, W0);
	R2(a, b, c, d, e, f, g, h, 28, 1, XW); SCHED_W_1(33, W1, W2, W3, W4, W5, W0);
	R2(d, a, b, c, h, e, f, g, 29, 2, XW); SCHED_W_2(33, W1, W2, W3, W4, W5, W0);

	/* Transform 30-32 + Precalc 36-38 */
	R2(c, d, a, b, g, h, e, f, 30, 0, XW); SCHED_W_0(36, W2, W3, W4, W5, W0, W1);
	R2(b, c, d, a, f, g, h, e, 31, 1, XW); SCHED_W_1(36, W2, W3, W4, W5, W0, W1);
	R2(a, b, c, d, e, f, g, h, 32, 2, XW); SCHED_W_2(36, W2, W3, W4, W5, W0, W1);

	/* Transform 33-35 + Precalc 39-41 */
	R2(d, a, b, c, h, e, f, g, 33, 0, XW); SCHED_W_0(39, W3, W4, W5, W0, W1, W2);
	R2(c, d, a, b, g, h, e, f, 34, 1, XW); SCHED_W_1(39, W3, W4, W5, W0, W1, W2);
	R2(b, c, d, a, f, g, h, e, 35, 2, XW); SCHED_W_2(39, W3, W4, W5, W0, W1, W2);

	/* Transform 36-38 + Precalc 42-44 */
	R2(a, b, c, d, e, f, g, h, 36, 0, XW); SCHED_W_0(42, W4, W5, W0, W1, W2, W3);
	R2(d, a, b, c, h, e, f, g, 37, 1, XW); SCHED_W_1(42, W4, W5, W0, W1, W2, W3);
	R2(c, d, a, b, g, h, e, f, 38, 2, XW); SCHED_W_2(42, W4, W5, W0, W1, W2, W3);

	/* Transform 39-41 + Precalc 45-47 */
	R2(b, c, d, a, f, g, h, e, 39, 0, XW); SCHED_W_0(45, W5, W0, W1, W2, W3, W4);
	R2(a, b, c, d, e, f, g, h, 40, 1, XW); SCHED_W_1(45, W5, W0, W1, W2, W3, W4);
	R2(d, a, b, c, h, e, f, g, 41, 2, XW); SCHED_W_2(45, W5, W0, W1, W2, W3, W4);

	/* Transform 42-44 + Precalc 48-50 */
	R2(c, d, a, b, g, h, e, f, 42, 0, XW); SCHED_W_0(48, W0, W1, W2, W3, W4, W5);
	R2(b, c, d, a, f, g, h, e, 43, 1, XW); SCHED_W_1(48, W0, W1, W2, W3, W4, W5);
	R2(a, b, c, d, e, f, g, h, 44, 2, XW); SCHED_W_2(48, W0, W1, W2, W3, W4, W5);

	/* Transform 45-47 + Precalc 51-53 */
	R2(d, a, b, c, h, e, f, g, 45, 0, XW); SCHED_W_0(51, W1, W2, W3, W4, W5, W0);
	R2(c, d, a, b, g, h, e, f, 46, 1, XW); SCHED_W_1(51, W1, W2, W3, W4, W5, W0);
	R2(b, c, d, a, f, g, h, e, 47, 2, XW); SCHED_W_2(51, W1, W2, W3, W4, W5, W0);

	/* Transform 48-50 + Precalc 54-56 */
	R2(a, b, c, d, e, f, g, h, 48, 0, XW); SCHED_W_0(54, W2, W3, W4, W5, W0, W1);
	R2(d, a, b, c, h, e, f, g, 49, 1, XW); SCHED_W_1(54, W2, W3, W4, W5, W0, W1);
	R2(c, d, a, b, g, h, e, f, 50, 2, XW); SCHED_W_2(54, W2, W3, W4, W5, W0, W1);

	/* Transform 51-53 + Precalc 57-59 */
	R2(b, c, d, a, f, g, h, e, 51, 0, XW); SCHED_W_0(57, W3, W4, W5, W0, W1, W2);
	R2(a, b, c, d, e, f, g, h, 52, 1, XW); SCHED_W_1(57, W3, W4, W5, W0, W1, W2);
	R2(d, a, b, c, h, e, f, g, 53, 2, XW); SCHED_W_2(57, W3, W4, W5, W0, W1, W2);

	/* Transform 54-56 + Precalc 60-62 */
	R2(c, d, a, b, g, h, e, f, 54, 0, XW); SCHED_W_0(60, W4, W5, W0, W1, W2, W3);
	R2(b, c, d, a, f, g, h, e, 55, 1, XW); SCHED_W_1(60, W4, W5, W0, W1, W2, W3);
	R2(a, b, c, d, e, f, g, h, 56, 2, XW); SCHED_W_2(60, W4, W5, W0, W1, W2, W3);

	/* Transform 57-59 + Precalc 63 */
	R2(d, a, b, c, h, e, f, g, 57, 0, XW); SCHED_W_0(63, W5, W0, W1, W2, W3, W4);
	R2(c, d, a, b, g, h, e, f, 58, 1, XW);
	R2(b, c, d, a, f, g, h, e, 59, 2, XW); SCHED_W_1(63, W5, W0, W1, W2, W3, W4);

	/* Transform 60-62 + Precalc 63 */
	R2(a, b, c, d, e, f, g, h, 60, 0, XW);
	R2(d, a, b, c, h, e, f, g, 61, 1, XW); SCHED_W_2(63, W5, W0, W1, W2, W3, W4);
	R2(c, d, a, b, g, h, e, f, 62, 2, XW);

	/* Transform 63 */
	R2(b, c, d, a, f, g, h, e, 63, 0, XW);

	/* Update the chaining variables. */
	xorl state_h0(RSTATE), a;
	xorl state_h1(RSTATE), b;
	xorl state_h2(RSTATE), c;
	xorl state_h3(RSTATE), d;
	movl a, state_h0(RSTATE);
	movl b, state_h1(RSTATE);
	movl c, state_h2(RSTATE);
	movl d, state_h3(RSTATE);
	xorl state_h4(RSTATE), e;
	xorl state_h5(RSTATE), f;
	xorl state_h6(RSTATE), g;
	xorl state_h7(RSTATE), h;
	movl e, state_h4(RSTATE);
	movl f, state_h5(RSTATE);
	movl g, state_h6(RSTATE);
	movl h, state_h7(RSTATE);

	cmpq $0, RNBLKS;
	jne .Loop;

	/* Clear the vector registers (this also zeroes %xmm0 for the
	 * stack wipe below). */
	vzeroall;

	movq (STACK_REG_SAVE + 0 * 8)(%rsp), %rbx;
	movq (STACK_REG_SAVE + 1 * 8)(%rsp), %r15;
	movq (STACK_REG_SAVE + 2 * 8)(%rsp), %r14;
	movq (STACK_REG_SAVE + 3 * 8)(%rsp), %r13;
	movq (STACK_REG_SAVE + 4 * 8)(%rsp), %r12;

	/* Wipe the message expansion area on the stack: these six stores
	 * cover all three 64-byte W1|W1W2 slots that held data-derived
	 * words. */
	vmovdqa %xmm0, IW_W1_ADDR(0, 0);
	vmovdqa %xmm0, IW_W1W2_ADDR(0, 0);
	vmovdqa %xmm0, IW_W1_ADDR(4, 0);
	vmovdqa %xmm0, IW_W1W2_ADDR(4, 0);
	vmovdqa %xmm0, IW_W1_ADDR(8, 0);
	vmovdqa %xmm0, IW_W1W2_ADDR(8, 0);

	movq %rbp, %rsp;
	popq %rbp;
	RET;
SYM_FUNC_END(sm3_transform_avx)