/*
 * Support for Intel AES-NI instructions.  This file contains glue code;
 * the actual AES, CTR, XTS and GCM implementations live in the accompanying
 * assembly files.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif

#define AESNI_ALIGN		16
#define AESNI_ALIGN_ATTR	__attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK		(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA	((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE	(sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE	(sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)

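/*
 * The assembly implementations expect 16-byte aligned key schedules and GCM
 * state, but the crypto API only guarantees CRYPTO_MINALIGN for context
 * memory.  AESNI_ALIGN_EXTRA reserves enough slack for the ctx accessors
 * below to realign the context with PTR_ALIGN()/ALIGN().
 */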
/* RFC4106 per-key state: GHASH subkey, expanded AES key and the 4-byte nonce (salt). */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16

/* Running GCM state shared with the assembly init/update/finalize routines. */
struct gcm_context_data {
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};
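
/*
 * Assembly entry points (aesni-intel_asm.S).  The bulk ECB/CBC helpers only
 * process whole 16-byte blocks; callers mask the length with AES_BLOCK_MASK.
 */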
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

/*
 * Below these request sizes the glue code drops from the AVX2/AVX GCM paths
 * back to the simpler variants (see gcmaes_crypt_by_sg()).
 */
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);

/*
 * One-shot GCM encryption: encrypts plaintext_len bytes of @in into @out and
 * emits an auth_tag_len byte tag computed over @aad and the ciphertext.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/*
 * One-shot GCM decryption: decrypts ciphertext_len bytes and writes the
 * computed tag to @auth_tag for the caller to verify.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* Incremental (init/update/finalize) interface to the SSE GCM implementation. */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};

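/*
 * aesni_gcm_tfm is set once in aesni_init() to the fastest implementation the
 * CPU supports (AVX2 "gen4", AVX "gen2", or the plain SSE version above) and
 * is driven as a stream by gcmaes_crypt_by_sg():
 *
 *	gcm_tfm->init(ctx, &data, iv, hash_subkey, assoc, assoclen);
 *	while (data left)
 *		gcm_tfm->enc_update(ctx, &data, dst, src, len);  (or dec_update)
 *	gcm_tfm->finalize(ctx, &data, auth_tag, auth_tag_len);
 *
 * with struct gcm_context_data carrying the running state between calls.
 * Short requests may still be downgraded to a simpler variant, see
 * AVX_GEN2_OPTSIZE/AVX_GEN4_OPTSIZE.
 */
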
#ifdef CONFIG_AS_AVX
/* "by8" AVX CTR mode routines, one per key size. */
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);

/* AVX (gen2) variants of the GCM interface. */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
				struct gcm_context_data *gdata,
				u8 *iv,
				u8 *hash_subkey,
				const u8 *aad,
				unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in,
				unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
				struct gcm_context_data *gdata,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};

#endif

#ifdef CONFIG_AS_AVX2
/* AVX2 (gen4) variants of the GCM interface. */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
				struct gcm_context_data *gdata,
				u8 *iv,
				u8 *hash_subkey,
				const u8 *aad,
				unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in,
				unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
				struct gcm_context_data *gdata,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};

#endif

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * Expand the key with AES-NI if the FPU is usable in this context,
	 * otherwise fall back to the generic key expansion.
	 */
	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

/*
 * The __aes variants back the internal "__aes" cipher and call the AES-NI
 * primitives directly; the caller must have made the FPU usable.
 */
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

#ifdef CONFIG_X86_64
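/*
 * CTR is a stream mode: ctr_crypt() pushes whole blocks through the asm
 * routine, and ctr_crypt_final() handles a trailing partial block by
 * encrypting the counter once and XORing the keystream into the tail.
 */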
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * Based on the key length, pick the "by8" variant of CTR mode
	 * encryption/decryption for improved performance.
	 * aes_set_key_common() ensures that the key length is one of
	 * 128, 192 or 256 bits.
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}

static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}


static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

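/*
 * The GCM AEADs come in pairs: an internal "__" algorithm that assumes it can
 * use the FPU directly, and a cryptd-backed wrapper (set up in rfc4106_init()
 * and generic_gcmaes_init() below) that hands requests to a cryptd worker
 * when the FPU is not usable in the caller's context.
 */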
static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}

static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/* The GHASH subkey H is the encryption of the all-zero block. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}

static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* The last 4 bytes of the key are the RFC4106 nonce (salt). */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
				  unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}

/*
 * The Integrity Check Value (i.e. the authentication tag) for RFC4106 may be
 * 8, 12 or 16 bytes long.
 */
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
				       unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

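/*
 * gcmaes_crypt_by_sg() is the common worker for all GCM variants.  It
 * linearizes the AAD (copying it to the heap only when it cannot be mapped
 * contiguously), walks the source/destination scatterlists feeding mappable
 * chunks to the enc_update/dec_update routines, and then either appends the
 * computed tag (encryption) or compares it against the trailing tag in the
 * source (decryption).
 */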
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

	/* Fall back to a less aggressive implementation for short inputs. */
#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}

	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}

static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/*
	 * Assuming RFC4106 with 64-bit extended sequence numbers, the AAD
	 * length must be 16 or 20 bytes (8 of which are the explicit IV).
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV = 4-byte nonce (salt) || 8-byte explicit IV || counter = 1 */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Assemble the IV exactly as in helper_rfc4106_encrypt(). */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

/*
 * Use the internal (synchronous) algorithm when the FPU is usable; go through
 * cryptd when it is not, or when we are in atomic context and cryptd already
 * has requests queued.
 */
static int gcmaes_wrapper_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_encrypt(req);
}

static int gcmaes_wrapper_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_decrypt(req);
}
#endif
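
/*
 * Algorithm registration.  The bare "aes" cipher and the internal "__aes"
 * variant are registered as crypto_algs; the skcipher modes get a simd
 * wrapper created in aesni_init(); the GCM AEADs are registered both as
 * internal "__" algorithms and as the cryptd-backed user-facing ones.
 */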
static struct crypto_alg aesni_algs[] = { {
	.cra_name = "aes",
	.cra_driver_name = "aes-aesni",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = aes_encrypt,
			.cia_decrypt = aes_decrypt
		}
	}
}, {
	.cra_name = "__aes",
	.cra_driver_name = "__aes-aesni",
	.cra_priority = 300,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = AES_MIN_KEY_SIZE,
			.cia_max_keysize = AES_MAX_KEY_SIZE,
			.cia_setkey = aes_set_key,
			.cia_encrypt = __aes_encrypt,
			.cia_decrypt = __aes_decrypt
		}
	}
} };

static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name = "__ecb(aes)",
			.cra_driver_name = "__ecb-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = aesni_skcipher_setkey,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base = {
			.cra_name = "__cbc(aes)",
			.cra_driver_name = "__cbc-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = aesni_skcipher_setkey,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name = "__ctr(aes)",
			.cra_driver_name = "__ctr-aes-aesni",
			.cra_priority = 400,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = 1,
			.cra_ctxsize = CRYPTO_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.chunksize = AES_BLOCK_SIZE,
		.setkey = aesni_skcipher_setkey,
		.encrypt = ctr_crypt,
		.decrypt = ctr_crypt,
	}, {
		.base = {
			.cra_name = "__xts(aes)",
			.cra_driver_name = "__xts-aes-aesni",
			.cra_priority = 401,
			.cra_flags = CRYPTO_ALG_INTERNAL,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = XTS_AES_CTX_SIZE,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = xts_aesni_setkey,
		.encrypt = xts_encrypt,
		.decrypt = xts_decrypt,
#endif
	}
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));

	return 0;
}

static void generic_gcmaes_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

static struct aead_alg aesni_aead_algs[] = { {
	.setkey = common_rfc4106_set_key,
	.setauthsize = common_rfc4106_set_authsize,
	.encrypt = helper_rfc4106_encrypt,
	.decrypt = helper_rfc4106_decrypt,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = 16,
	.base = {
		.cra_name = "__gcm-aes-aesni",
		.cra_driver_name = "__driver-gcm-aes-aesni",
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask = AESNI_ALIGN - 1,
		.cra_module = THIS_MODULE,
	},
}, {
	.init = rfc4106_init,
	.exit = rfc4106_exit,
	.setkey = gcmaes_wrapper_set_key,
	.setauthsize = gcmaes_wrapper_set_authsize,
	.encrypt = gcmaes_wrapper_encrypt,
	.decrypt = gcmaes_wrapper_decrypt,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = 16,
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "rfc4106-gcm-aesni",
		.cra_priority = 400,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct cryptd_aead *),
		.cra_module = THIS_MODULE,
	},
}, {
	.setkey = generic_gcmaes_set_key,
	.setauthsize = generic_gcmaes_set_authsize,
	.encrypt = generic_gcmaes_encrypt,
	.decrypt = generic_gcmaes_decrypt,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = 16,
	.base = {
		.cra_name = "__generic-gcm-aes-aesni",
		.cra_driver_name = "__driver-generic-gcm-aes-aesni",
		.cra_priority = 0,
		.cra_flags = CRYPTO_ALG_INTERNAL,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask = AESNI_ALIGN - 1,
		.cra_module = THIS_MODULE,
	},
}, {
	.init = generic_gcmaes_init,
	.exit = generic_gcmaes_exit,
	.setkey = gcmaes_wrapper_set_key,
	.setauthsize = gcmaes_wrapper_set_authsize,
	.encrypt = gcmaes_wrapper_encrypt,
	.decrypt = gcmaes_wrapper_decrypt,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = 16,
	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "generic-gcm-aesni",
		.cra_priority = 400,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct cryptd_aead *),
		.cra_module = THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif

static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);

static void aesni_free_simds(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
		    aesni_simd_skciphers[i]; i++)
		simd_skcipher_free(aesni_simd_skciphers[i]);
}

static int __init aesni_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		return err;

	err = crypto_register_skciphers(aesni_skciphers,
					ARRAY_SIZE(aesni_skciphers));
	if (err)
		goto unregister_algs;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_skciphers;

	/* Wrap each internal "__" skcipher with a simd helper (drop the "__"). */
	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
		algname = aesni_skciphers[i].base.cra_name + 2;
		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
		basename = aesni_skciphers[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aesni_simd_skciphers[i] = simd;
	}

	return 0;

unregister_simds:
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	return err;
}

static void __exit aesni_exit(void)
{
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");