/*
 * Glue code for the Intel AES-NI instructions. The actual cipher work is
 * done by the asmlinkage routines declared below (aesni_enc/aesni_dec and
 * the ECB/CBC/CTR/XTS/GCM helpers); this file wires them into the kernel
 * crypto API, falling back to the generic C implementation or deferring to
 * cryptd whenever the FPU cannot be used.
 */

22#include <linux/hardirq.h>
23#include <linux/types.h>
24#include <linux/crypto.h>
25#include <linux/module.h>
26#include <linux/err.h>
27#include <crypto/algapi.h>
28#include <crypto/aes.h>
29#include <crypto/cryptd.h>
30#include <crypto/ctr.h>
31#include <crypto/b128ops.h>
32#include <crypto/lrw.h>
33#include <crypto/xts.h>
34#include <asm/cpu_device_id.h>
35#include <asm/i387.h>
36#include <asm/crypto/aes.h>
37#include <asm/crypto/ablk_helper.h>
38#include <crypto/scatterwalk.h>
39#include <crypto/internal/aead.h>
40#include <linux/workqueue.h>
41#include <linux/spinlock.h>
42#ifdef CONFIG_X86_64
43#include <asm/crypto/glue_helper.h>
44#endif
45
46#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
47#define HAS_PCBC
48#endif
49
/*
 * Per-tfm context for rfc4106(gcm(aes)). It is stored in the crypto_tfm
 * context area and always accessed through aesni_rfc4106_gcm_ctx_get(),
 * which aligns the pointer to AESNI_ALIGN (16 bytes) for the assembly code.
 */
55struct aesni_rfc4106_gcm_ctx {
56 u8 hash_subkey[16];
57 struct crypto_aes_ctx aes_key_expanded;
58 u8 nonce[4];
59 struct cryptd_aead *cryptd_tfm;
60};
61
62struct aesni_gcm_set_hash_subkey_result {
63 int err;
64 struct completion completion;
65};
66
67struct aesni_hash_subkey_req_data {
68 u8 iv[16];
69 struct aesni_gcm_set_hash_subkey_result result;
70 struct scatterlist sg;
71};
72
73#define AESNI_ALIGN (16)
74#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
75#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
76#define RFC4106_HASH_SUBKEY_SIZE 16
77
78struct aesni_lrw_ctx {
79 struct lrw_table_ctx lrw_table;
80 u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
81};
82
83struct generic_gcmaes_ctx {
84 u8 hash_subkey[16] AESNI_ALIGN_ATTR;
85 struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
86 struct cryptd_aead *cryptd_tfm;
87};
88
89struct aesni_xts_ctx {
90 u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
91 u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
92};
93
94asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
95 unsigned int key_len);
96asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
97 const u8 *in);
98asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
99 const u8 *in);
100asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
101 const u8 *in, unsigned int len);
102asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
103 const u8 *in, unsigned int len);
104asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
105 const u8 *in, unsigned int len, u8 *iv);
106asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
107 const u8 *in, unsigned int len, u8 *iv);
108
109int crypto_fpu_init(void);
110void crypto_fpu_exit(void);
111
112#define AVX_GEN2_OPTSIZE 640
113#define AVX_GEN4_OPTSIZE 4096
114
115#ifdef CONFIG_X86_64
116
117static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
118 const u8 *in, unsigned int len, u8 *iv);
119asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
120 const u8 *in, unsigned int len, u8 *iv);
121
122asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
123 const u8 *in, bool enc, u8 *iv);
124
/*
 * asmlinkage void aesni_gcm_enc()
 * ctx:  AES key schedule (struct crypto_aes_ctx)
 * out:  ciphertext output buffer
 * in:   plaintext input buffer
 * plaintext_len: length of the plaintext in bytes
 * iv:   16-byte initial counter block (IV material with the 32-bit block
 *       counter initialised to 1)
 * hash_subkey: GHASH subkey H = ENC(K, 0^128)
 * aad:  additional authenticated data
 * aad_len: length of the AAD in bytes
 * auth_tag: buffer receiving the computed authentication tag
 * auth_tag_len: requested tag length (8, 12 or 16 bytes)
 */
139asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
140 const u8 *in, unsigned long plaintext_len, u8 *iv,
141 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
142 u8 *auth_tag, unsigned long auth_tag_len);
143
/*
 * asmlinkage void aesni_gcm_dec()
 * ctx:  AES key schedule (struct crypto_aes_ctx)
 * out:  plaintext output buffer
 * in:   ciphertext input buffer
 * ciphertext_len: length of the ciphertext in bytes
 * iv:   16-byte initial counter block (IV material with the 32-bit block
 *       counter initialised to 1)
 * hash_subkey: GHASH subkey H = ENC(K, 0^128)
 * aad:  additional authenticated data
 * aad_len: length of the AAD in bytes
 * auth_tag: buffer receiving the computed authentication tag, which the
 *       caller compares against the tag carried at the end of the ciphertext
 * auth_tag_len: tag length (8, 12 or 16 bytes)
 */
159asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
160 const u8 *in, unsigned long ciphertext_len, u8 *iv,
161 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
162 u8 *auth_tag, unsigned long auth_tag_len);
163
164
165#ifdef CONFIG_AS_AVX
166asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
167 void *keys, u8 *out, unsigned int num_bytes);
168asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
169 void *keys, u8 *out, unsigned int num_bytes);
170asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
171 void *keys, u8 *out, unsigned int num_bytes);
172
/*
 * AVX ("gen2") variants of the GCM routines. aesni_gcm_precomp_avx_gen2()
 * expands the hash subkey into the context before the encrypt/decrypt
 * entry points are used.
 */
178asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
179
180asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
181 const u8 *in, unsigned long plaintext_len, u8 *iv,
182 const u8 *aad, unsigned long aad_len,
183 u8 *auth_tag, unsigned long auth_tag_len);
184
185asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
186 const u8 *in, unsigned long ciphertext_len, u8 *iv,
187 const u8 *aad, unsigned long aad_len,
188 u8 *auth_tag, unsigned long auth_tag_len);
189
190static void aesni_gcm_enc_avx(void *ctx, u8 *out,
191 const u8 *in, unsigned long plaintext_len, u8 *iv,
192 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
193 u8 *auth_tag, unsigned long auth_tag_len)
194{
195 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
 if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
     (aes_ctx->key_length != AES_KEYSIZE_128)) {
197 aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
198 aad_len, auth_tag, auth_tag_len);
199 } else {
200 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
201 aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
202 aad_len, auth_tag, auth_tag_len);
203 }
204}
205
206static void aesni_gcm_dec_avx(void *ctx, u8 *out,
207 const u8 *in, unsigned long ciphertext_len, u8 *iv,
208 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
209 u8 *auth_tag, unsigned long auth_tag_len)
210{
211 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
212 if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
213 aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey, aad,
214 aad_len, auth_tag, auth_tag_len);
215 } else {
216 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
217 aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
218 aad_len, auth_tag, auth_tag_len);
219 }
220}
221#endif
222
223#ifdef CONFIG_AS_AVX2
224
/*
 * AVX2 ("gen4") variants of the GCM routines, used for large requests
 * (>= AVX_GEN4_OPTSIZE bytes) on CPUs with AVX2.
 */
229asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);
230
231asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
232 const u8 *in, unsigned long plaintext_len, u8 *iv,
233 const u8 *aad, unsigned long aad_len,
234 u8 *auth_tag, unsigned long auth_tag_len);
235
236asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
237 const u8 *in, unsigned long ciphertext_len, u8 *iv,
238 const u8 *aad, unsigned long aad_len,
239 u8 *auth_tag, unsigned long auth_tag_len);
240
241static void aesni_gcm_enc_avx2(void *ctx, u8 *out,
242 const u8 *in, unsigned long plaintext_len, u8 *iv,
243 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
244 u8 *auth_tag, unsigned long auth_tag_len)
245{
246 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
247 if ((plaintext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
248 aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
249 aad_len, auth_tag, auth_tag_len);
250 } else if (plaintext_len < AVX_GEN4_OPTSIZE) {
251 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
252 aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
253 aad_len, auth_tag, auth_tag_len);
254 } else {
255 aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
256 aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
257 aad_len, auth_tag, auth_tag_len);
258 }
259}
260
261static void aesni_gcm_dec_avx2(void *ctx, u8 *out,
262 const u8 *in, unsigned long ciphertext_len, u8 *iv,
263 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
264 u8 *auth_tag, unsigned long auth_tag_len)
265{
266 struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx*)ctx;
267 if ((ciphertext_len < AVX_GEN2_OPTSIZE) || (aes_ctx->key_length != AES_KEYSIZE_128)) {
268 aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
269 aad, aad_len, auth_tag, auth_tag_len);
270 } else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
271 aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
272 aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
273 aad_len, auth_tag, auth_tag_len);
274 } else {
275 aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
276 aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
277 aad_len, auth_tag, auth_tag_len);
278 }
279}
280#endif
281
282static void (*aesni_gcm_enc_tfm)(void *ctx, u8 *out,
283 const u8 *in, unsigned long plaintext_len, u8 *iv,
284 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
285 u8 *auth_tag, unsigned long auth_tag_len);
286
287static void (*aesni_gcm_dec_tfm)(void *ctx, u8 *out,
288 const u8 *in, unsigned long ciphertext_len, u8 *iv,
289 u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
290 u8 *auth_tag, unsigned long auth_tag_len);
291
292static inline struct
293aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
294{
295 return
296 (struct aesni_rfc4106_gcm_ctx *)
297 PTR_ALIGN((u8 *)
298 crypto_tfm_ctx(crypto_aead_tfm(tfm)), AESNI_ALIGN);
299}
300
301static inline struct
302generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
303{
304 unsigned long align = AESNI_ALIGN;
305
306 if (align <= crypto_tfm_ctx_alignment())
307 align = 1;
308 return PTR_ALIGN(crypto_aead_ctx(tfm), align);
309}
310#endif
311
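/*
 * The raw AES context buffers are over-allocated by AESNI_ALIGN - 1 bytes
 * (see the cra_ctxsize values below); aes_ctx() returns the 16-byte
 * aligned pointer inside that buffer that the assembly routines expect.
 */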
312static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
313{
314 unsigned long addr = (unsigned long)raw_ctx;
315 unsigned long align = AESNI_ALIGN;
316
317 if (align <= crypto_tfm_ctx_alignment())
318 align = 1;
319 return (struct crypto_aes_ctx *)ALIGN(addr, align);
320}
321
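/*
 * Key expansion uses the AES-NI assembly (aesni_set_key) when the FPU may
 * be used; otherwise, e.g. in interrupt context, it falls back to the
 * generic C key expansion so setkey never fails just because the SIMD
 * registers are unavailable.
 */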
322static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
323 const u8 *in_key, unsigned int key_len)
324{
325 struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
326 u32 *flags = &tfm->crt_flags;
327 int err;
328
329 if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
330 key_len != AES_KEYSIZE_256) {
331 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
332 return -EINVAL;
333 }
334
335 if (!irq_fpu_usable())
336 err = crypto_aes_expand_key(ctx, in_key, key_len);
337 else {
338 kernel_fpu_begin();
339 err = aesni_set_key(ctx, in_key, key_len);
340 kernel_fpu_end();
341 }
342
343 return err;
344}
345
346static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
347 unsigned int key_len)
348{
349 return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
350}
351
352static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
353{
354 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
355
356 if (!irq_fpu_usable())
357 crypto_aes_encrypt_x86(ctx, dst, src);
358 else {
359 kernel_fpu_begin();
360 aesni_enc(ctx, dst, src);
361 kernel_fpu_end();
362 }
363}
364
365static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
366{
367 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
368
369 if (!irq_fpu_usable())
370 crypto_aes_decrypt_x86(ctx, dst, src);
371 else {
372 kernel_fpu_begin();
373 aesni_dec(ctx, dst, src);
374 kernel_fpu_end();
375 }
376}
377
378static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
379{
380 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
381
382 aesni_enc(ctx, dst, src);
383}
384
385static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
386{
387 struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
388
389 aesni_dec(ctx, dst, src);
390}
391
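/*
 * The ECB/CBC helpers below all follow the same pattern: walk the
 * scatterlists in virtually mapped chunks, process the whole-block portion
 * of each chunk with the assembly routine inside one kernel_fpu_begin()/
 * kernel_fpu_end() section, and hand any tail bytes back to
 * blkcipher_walk_done().
 */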
392static int ecb_encrypt(struct blkcipher_desc *desc,
393 struct scatterlist *dst, struct scatterlist *src,
394 unsigned int nbytes)
395{
396 struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
397 struct blkcipher_walk walk;
398 int err;
399
400 blkcipher_walk_init(&walk, dst, src, nbytes);
401 err = blkcipher_walk_virt(desc, &walk);
402 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
403
404 kernel_fpu_begin();
405 while ((nbytes = walk.nbytes)) {
406 aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
407 nbytes & AES_BLOCK_MASK);
408 nbytes &= AES_BLOCK_SIZE - 1;
409 err = blkcipher_walk_done(desc, &walk, nbytes);
410 }
411 kernel_fpu_end();
412
413 return err;
414}
415
416static int ecb_decrypt(struct blkcipher_desc *desc,
417 struct scatterlist *dst, struct scatterlist *src,
418 unsigned int nbytes)
419{
420 struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
421 struct blkcipher_walk walk;
422 int err;
423
424 blkcipher_walk_init(&walk, dst, src, nbytes);
425 err = blkcipher_walk_virt(desc, &walk);
426 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
427
428 kernel_fpu_begin();
429 while ((nbytes = walk.nbytes)) {
430 aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
431 nbytes & AES_BLOCK_MASK);
432 nbytes &= AES_BLOCK_SIZE - 1;
433 err = blkcipher_walk_done(desc, &walk, nbytes);
434 }
435 kernel_fpu_end();
436
437 return err;
438}
439
440static int cbc_encrypt(struct blkcipher_desc *desc,
441 struct scatterlist *dst, struct scatterlist *src,
442 unsigned int nbytes)
443{
444 struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
445 struct blkcipher_walk walk;
446 int err;
447
448 blkcipher_walk_init(&walk, dst, src, nbytes);
449 err = blkcipher_walk_virt(desc, &walk);
450 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
451
452 kernel_fpu_begin();
453 while ((nbytes = walk.nbytes)) {
454 aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
455 nbytes & AES_BLOCK_MASK, walk.iv);
456 nbytes &= AES_BLOCK_SIZE - 1;
457 err = blkcipher_walk_done(desc, &walk, nbytes);
458 }
459 kernel_fpu_end();
460
461 return err;
462}
463
464static int cbc_decrypt(struct blkcipher_desc *desc,
465 struct scatterlist *dst, struct scatterlist *src,
466 unsigned int nbytes)
467{
468 struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
469 struct blkcipher_walk walk;
470 int err;
471
472 blkcipher_walk_init(&walk, dst, src, nbytes);
473 err = blkcipher_walk_virt(desc, &walk);
474 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
475
476 kernel_fpu_begin();
477 while ((nbytes = walk.nbytes)) {
478 aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
479 nbytes & AES_BLOCK_MASK, walk.iv);
480 nbytes &= AES_BLOCK_SIZE - 1;
481 err = blkcipher_walk_done(desc, &walk, nbytes);
482 }
483 kernel_fpu_end();
484
485 return err;
486}
487
488#ifdef CONFIG_X86_64
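/*
 * Handle the final partial block of CTR mode: encrypt the current counter
 * block to produce keystream and XOR only the remaining bytes into dst.
 */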
489static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
490 struct blkcipher_walk *walk)
491{
492 u8 *ctrblk = walk->iv;
493 u8 keystream[AES_BLOCK_SIZE];
494 u8 *src = walk->src.virt.addr;
495 u8 *dst = walk->dst.virt.addr;
496 unsigned int nbytes = walk->nbytes;
497
498 aesni_enc(ctx, keystream, ctrblk);
499 crypto_xor(keystream, src, nbytes);
500 memcpy(dst, keystream, nbytes);
501 crypto_inc(ctrblk, AES_BLOCK_SIZE);
502}
503
504#ifdef CONFIG_AS_AVX
505static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
506 const u8 *in, unsigned int len, u8 *iv)
507{
 /*
  * aes_set_key_common() has already rejected anything other than
  * 128/192/256-bit keys, so dispatch on key_length and let the final
  * else branch handle the 256-bit case.
  */
514 if (ctx->key_length == AES_KEYSIZE_128)
515 aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
516 else if (ctx->key_length == AES_KEYSIZE_192)
517 aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
518 else
519 aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
520}
521#endif
522
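/*
 * CTR mode: full blocks go through aesni_ctr_enc_tfm (the SSE or AVX "by8"
 * variant, chosen at module init); any leftover partial block is finished
 * by ctr_crypt_final().
 */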
523static int ctr_crypt(struct blkcipher_desc *desc,
524 struct scatterlist *dst, struct scatterlist *src,
525 unsigned int nbytes)
526{
527 struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
528 struct blkcipher_walk walk;
529 int err;
530
531 blkcipher_walk_init(&walk, dst, src, nbytes);
532 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
533 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
534
535 kernel_fpu_begin();
536 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
537 aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
538 nbytes & AES_BLOCK_MASK, walk.iv);
539 nbytes &= AES_BLOCK_SIZE - 1;
540 err = blkcipher_walk_done(desc, &walk, nbytes);
541 }
542 if (walk.nbytes) {
543 ctr_crypt_final(ctx, &walk);
544 err = blkcipher_walk_done(desc, &walk, 0);
545 }
546 kernel_fpu_end();
547
548 return err;
549}
550#endif
551
552static int ablk_ecb_init(struct crypto_tfm *tfm)
553{
554 return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
555}
556
557static int ablk_cbc_init(struct crypto_tfm *tfm)
558{
559 return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
560}
561
562#ifdef CONFIG_X86_64
563static int ablk_ctr_init(struct crypto_tfm *tfm)
564{
565 return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
566}
567
568#endif
569
570#ifdef HAS_PCBC
571static int ablk_pcbc_init(struct crypto_tfm *tfm)
572{
573 return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
574}
575#endif
576
577static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
578{
579 aesni_ecb_enc(ctx, blks, blks, nbytes);
580}
581
582static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
583{
584 aesni_ecb_dec(ctx, blks, blks, nbytes);
585}
586
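/*
 * An LRW key is the AES key followed by a 16-byte tweak key; the tweak key
 * (the last AES_BLOCK_SIZE bytes) seeds the GF(2^128) multiplication table
 * used by lrw_crypt().
 */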
587static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
588 unsigned int keylen)
589{
590 struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
591 int err;
592
593 err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key,
594 keylen - AES_BLOCK_SIZE);
595 if (err)
596 return err;
597
598 return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE);
599}
600
601static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm)
602{
603 struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
604
605 lrw_free_table(&ctx->lrw_table);
606}
607
608static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
609 struct scatterlist *src, unsigned int nbytes)
610{
611 struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
612 be128 buf[8];
613 struct lrw_crypt_req req = {
614 .tbuf = buf,
615 .tbuflen = sizeof(buf),
616
617 .table_ctx = &ctx->lrw_table,
618 .crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
619 .crypt_fn = lrw_xts_encrypt_callback,
620 };
621 int ret;
622
623 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
624
625 kernel_fpu_begin();
626 ret = lrw_crypt(desc, dst, src, nbytes, &req);
627 kernel_fpu_end();
628
629 return ret;
630}
631
632static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
633 struct scatterlist *src, unsigned int nbytes)
634{
635 struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
636 be128 buf[8];
637 struct lrw_crypt_req req = {
638 .tbuf = buf,
639 .tbuflen = sizeof(buf),
640
641 .table_ctx = &ctx->lrw_table,
642 .crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
643 .crypt_fn = lrw_xts_decrypt_callback,
644 };
645 int ret;
646
647 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
648
649 kernel_fpu_begin();
650 ret = lrw_crypt(desc, dst, src, nbytes, &req);
651 kernel_fpu_end();
652
653 return ret;
654}
655
656static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
657 unsigned int keylen)
658{
659 struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm);
660 int err;
661
662 err = xts_check_key(tfm, key, keylen);
663 if (err)
664 return err;
665
 /* first half of the xts key is the data (crypt) key */
667 err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2);
668 if (err)
669 return err;
670
 /* second half of the xts key is the tweak key */
672 return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2,
673 keylen / 2);
674}
675
676
677static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
678{
679 aesni_enc(ctx, out, in);
680}
681
682#ifdef CONFIG_X86_64
683
684static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
685{
686 glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
687}
688
689static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
690{
691 glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
692}
693
694static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
695{
696 aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
697}
698
699static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
700{
701 aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
702}
703
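/*
 * XTS glue descriptors: process eight blocks at a time with
 * aesni_xts_crypt8() and fall back to single-block operation for the
 * remainder. fpu_blocks_limit = 1 means the FPU is used even for a single
 * block.
 */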
704static const struct common_glue_ctx aesni_enc_xts = {
705 .num_funcs = 2,
706 .fpu_blocks_limit = 1,
707
708 .funcs = { {
709 .num_blocks = 8,
710 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
711 }, {
712 .num_blocks = 1,
713 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
714 } }
715};
716
717static const struct common_glue_ctx aesni_dec_xts = {
718 .num_funcs = 2,
719 .fpu_blocks_limit = 1,
720
721 .funcs = { {
722 .num_blocks = 8,
723 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
724 }, {
725 .num_blocks = 1,
726 .fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
727 } }
728};
729
730static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
731 struct scatterlist *src, unsigned int nbytes)
732{
733 struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
734
735 return glue_xts_crypt_128bit(&aesni_enc_xts, desc, dst, src, nbytes,
736 XTS_TWEAK_CAST(aesni_xts_tweak),
737 aes_ctx(ctx->raw_tweak_ctx),
738 aes_ctx(ctx->raw_crypt_ctx));
739}
740
741static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
742 struct scatterlist *src, unsigned int nbytes)
743{
744 struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
745
746 return glue_xts_crypt_128bit(&aesni_dec_xts, desc, dst, src, nbytes,
747 XTS_TWEAK_CAST(aesni_xts_tweak),
748 aes_ctx(ctx->raw_tweak_ctx),
749 aes_ctx(ctx->raw_crypt_ctx));
750}
751
752#else
753
754static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
755 struct scatterlist *src, unsigned int nbytes)
756{
757 struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
758 be128 buf[8];
759 struct xts_crypt_req req = {
760 .tbuf = buf,
761 .tbuflen = sizeof(buf),
762
763 .tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
764 .tweak_fn = aesni_xts_tweak,
765 .crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
766 .crypt_fn = lrw_xts_encrypt_callback,
767 };
768 int ret;
769
770 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
771
772 kernel_fpu_begin();
773 ret = xts_crypt(desc, dst, src, nbytes, &req);
774 kernel_fpu_end();
775
776 return ret;
777}
778
779static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
780 struct scatterlist *src, unsigned int nbytes)
781{
782 struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
783 be128 buf[8];
784 struct xts_crypt_req req = {
785 .tbuf = buf,
786 .tbuflen = sizeof(buf),
787
788 .tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
789 .tweak_fn = aesni_xts_tweak,
790 .crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
791 .crypt_fn = lrw_xts_decrypt_callback,
792 };
793 int ret;
794
795 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
796
797 kernel_fpu_begin();
798 ret = xts_crypt(desc, dst, src, nbytes, &req);
799 kernel_fpu_end();
800
801 return ret;
802}
803
804#endif
805
806#ifdef CONFIG_X86_64
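/*
 * The rfc4106 front end is asynchronous: init allocates an internal cryptd
 * AEAD ("__driver-gcm-aes-aesni") so that requests arriving in a context
 * where the FPU cannot be used are deferred to a workqueue instead of
 * being processed inline.
 */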
807static int rfc4106_init(struct crypto_tfm *tfm)
808{
809 struct cryptd_aead *cryptd_tfm;
810 struct aesni_rfc4106_gcm_ctx *ctx = (struct aesni_rfc4106_gcm_ctx *)
811 PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
812 struct crypto_aead *cryptd_child;
813 struct aesni_rfc4106_gcm_ctx *child_ctx;
814 cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
815 CRYPTO_ALG_INTERNAL,
816 CRYPTO_ALG_INTERNAL);
817 if (IS_ERR(cryptd_tfm))
818 return PTR_ERR(cryptd_tfm);
819
820 cryptd_child = cryptd_aead_child(cryptd_tfm);
821 child_ctx = aesni_rfc4106_gcm_ctx_get(cryptd_child);
822 memcpy(child_ctx, ctx, sizeof(*ctx));
823 ctx->cryptd_tfm = cryptd_tfm;
824 tfm->crt_aead.reqsize = sizeof(struct aead_request)
825 + crypto_aead_reqsize(&cryptd_tfm->base);
826 return 0;
827}
828
829static void rfc4106_exit(struct crypto_tfm *tfm)
830{
831 struct aesni_rfc4106_gcm_ctx *ctx =
832 (struct aesni_rfc4106_gcm_ctx *)
833 PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
834 if (!IS_ERR(ctx->cryptd_tfm))
835 cryptd_free_aead(ctx->cryptd_tfm);
836 return;
837}
838
839static void
840rfc4106_set_hash_subkey_done(struct crypto_async_request *req, int err)
841{
842 struct aesni_gcm_set_hash_subkey_result *result = req->data;
843
844 if (err == -EINPROGRESS)
845 return;
846 result->err = err;
847 complete(&result->completion);
848}
849
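/*
 * Derive the GHASH subkey H = ENC(K, 0^128). This is done with a ctr(aes)
 * request over an all-zero block and an all-zero counter, waiting for
 * completion if the backend turns out to be asynchronous.
 */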
850static int
851rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
852{
853 struct crypto_ablkcipher *ctr_tfm;
854 struct ablkcipher_request *req;
855 int ret = -EINVAL;
856 struct aesni_hash_subkey_req_data *req_data;
857
858 ctr_tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
859 if (IS_ERR(ctr_tfm))
860 return PTR_ERR(ctr_tfm);
861
862 crypto_ablkcipher_clear_flags(ctr_tfm, ~0);
863
864 ret = crypto_ablkcipher_setkey(ctr_tfm, key, key_len);
865 if (ret)
866 goto out_free_ablkcipher;
867
868 ret = -ENOMEM;
869 req = ablkcipher_request_alloc(ctr_tfm, GFP_KERNEL);
870 if (!req)
871 goto out_free_ablkcipher;
872
873 req_data = kmalloc(sizeof(*req_data), GFP_KERNEL);
874 if (!req_data)
875 goto out_free_request;
876
877 memset(req_data->iv, 0, sizeof(req_data->iv));
878
 /* The GHASH subkey is H = ENC(K, 0^128): run ctr(aes) with an all-zero
  * counter block over the zeroed subkey buffer below. */
881 memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);
882
883 init_completion(&req_data->result.completion);
884 sg_init_one(&req_data->sg, hash_subkey, RFC4106_HASH_SUBKEY_SIZE);
885 ablkcipher_request_set_tfm(req, ctr_tfm);
886 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
887 CRYPTO_TFM_REQ_MAY_BACKLOG,
888 rfc4106_set_hash_subkey_done,
889 &req_data->result);
890
891 ablkcipher_request_set_crypt(req, &req_data->sg,
892 &req_data->sg, RFC4106_HASH_SUBKEY_SIZE, req_data->iv);
893
894 ret = crypto_ablkcipher_encrypt(req);
895 if (ret == -EINPROGRESS || ret == -EBUSY) {
896 ret = wait_for_completion_interruptible
897 (&req_data->result.completion);
898 if (!ret)
899 ret = req_data->result.err;
900 }
901 kfree(req_data);
902out_free_request:
903 ablkcipher_request_free(req);
904out_free_ablkcipher:
905 crypto_free_ablkcipher(ctr_tfm);
906 return ret;
907}
908
909static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
910 unsigned int key_len)
911{
912 int ret = 0;
913 struct crypto_tfm *tfm = crypto_aead_tfm(parent);
914 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
915 struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
916 struct aesni_rfc4106_gcm_ctx *child_ctx =
917 aesni_rfc4106_gcm_ctx_get(cryptd_child);
918 u8 *new_key_align, *new_key_mem = NULL;
919
920 if (key_len < 4) {
921 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
922 return -EINVAL;
923 }
924
925 key_len -= 4;
926 if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
927 key_len != AES_KEYSIZE_256) {
928 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
929 return -EINVAL;
930 }
931
932 memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
933
934 if ((unsigned long)(&(ctx->aes_key_expanded.key_enc[0])) % AESNI_ALIGN)
935 return -EINVAL;
936
937 if ((unsigned long)key % AESNI_ALIGN) {
 /* The key is not 16-byte aligned: copy it into an aligned scratch buffer. */
939 new_key_mem = kmalloc(key_len+AESNI_ALIGN, GFP_KERNEL);
940 if (!new_key_mem)
941 return -ENOMEM;
942
943 new_key_align = PTR_ALIGN(new_key_mem, AESNI_ALIGN);
944 memcpy(new_key_align, key, key_len);
945 key = new_key_align;
946 }
947
948 if (!irq_fpu_usable())
949 ret = crypto_aes_expand_key(&(ctx->aes_key_expanded),
950 key, key_len);
951 else {
952 kernel_fpu_begin();
953 ret = aesni_set_key(&(ctx->aes_key_expanded), key, key_len);
954 kernel_fpu_end();
955 }
956
957 if ((unsigned long)(&(ctx->hash_subkey[0])) % AESNI_ALIGN) {
958 ret = -EINVAL;
959 goto exit;
960 }
961 ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
962 memcpy(child_ctx, ctx, sizeof(*ctx));
963exit:
964 kfree(new_key_mem);
965 return ret;
966}
967
/* The ICV (authentication tag) for rfc4106 may be 8, 12 or 16 bytes long. */
970static int rfc4106_set_authsize(struct crypto_aead *parent,
971 unsigned int authsize)
972{
973 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
974 struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
975
976 switch (authsize) {
977 case 8:
978 case 12:
979 case 16:
980 break;
981 default:
982 return -EINVAL;
983 }
984 crypto_aead_crt(parent)->authsize = authsize;
985 crypto_aead_crt(cryptd_child)->authsize = authsize;
986 return 0;
987}
988
989static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
990 unsigned int authsize)
991{
992 switch (authsize) {
993 case 4:
994 case 8:
995 case 12:
996 case 13:
997 case 14:
998 case 15:
999 case 16:
1000 break;
1001 default:
1002 return -EINVAL;
1003 }
1004
1005 return 0;
1006}
1007
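/*
 * Outer encrypt/decrypt entry points: when the FPU is unusable the request
 * is re-targeted at the cryptd transform and queued; otherwise the inner
 * driver is invoked directly inside kernel_fpu_begin()/kernel_fpu_end().
 */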
1008static int rfc4106_encrypt(struct aead_request *req)
1009{
1010 int ret;
1011 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1012 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1013
1014 if (!irq_fpu_usable()) {
1015 struct aead_request *cryptd_req =
1016 (struct aead_request *) aead_request_ctx(req);
1017 memcpy(cryptd_req, req, sizeof(*req));
1018 aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
1019 return crypto_aead_encrypt(cryptd_req);
1020 } else {
1021 struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
1022 kernel_fpu_begin();
1023 ret = cryptd_child->base.crt_aead.encrypt(req);
1024 kernel_fpu_end();
1025 return ret;
1026 }
1027}
1028
1029static int rfc4106_decrypt(struct aead_request *req)
1030{
1031 int ret;
1032 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1033 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1034
1035 if (!irq_fpu_usable()) {
1036 struct aead_request *cryptd_req =
1037 (struct aead_request *) aead_request_ctx(req);
1038 memcpy(cryptd_req, req, sizeof(*req));
1039 aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
1040 return crypto_aead_decrypt(cryptd_req);
1041 } else {
1042 struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
1043 kernel_fpu_begin();
1044 ret = cryptd_child->base.crt_aead.decrypt(req);
1045 kernel_fpu_end();
1046 return ret;
1047 }
1048}
1049
1050static int __driver_rfc4106_encrypt(struct aead_request *req)
1051{
1052 u8 one_entry_in_sg = 0;
1053 u8 *src, *dst, *assoc;
1054 __be32 counter = cpu_to_be32(1);
1055 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1056 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1057 u32 key_len = ctx->aes_key_expanded.key_length;
1058 void *aes_ctx = &(ctx->aes_key_expanded);
1059 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
1060 u8 iv_tab[16+AESNI_ALIGN];
1061 u8* iv = (u8 *) PTR_ALIGN((u8 *)iv_tab, AESNI_ALIGN);
1062 struct scatter_walk src_sg_walk;
1063 struct scatter_walk assoc_sg_walk;
1064 struct scatter_walk dst_sg_walk;
1065 unsigned int i;
1066
 /* RFC 4106 allows 8 or 12 bytes of AAD (12 when 64-bit extended
  * sequence numbers are used) and an 8, 12 or 16 byte ICV. */
1070 if (unlikely(req->assoclen != 8 && req->assoclen != 12))
1071 return -EINVAL;
1072 if (unlikely(auth_tag_len != 8 && auth_tag_len != 12 && auth_tag_len != 16))
1073 return -EINVAL;
1074 if (unlikely(key_len != AES_KEYSIZE_128 &&
1075 key_len != AES_KEYSIZE_192 &&
1076 key_len != AES_KEYSIZE_256))
1077 return -EINVAL;
 /* Build the per-request IV: 4-byte nonce (salt) || 8-byte explicit IV
  * || 32-bit big-endian block counter initialised to 1. */
1080 for (i = 0; i < 4; i++)
1081 *(iv+i) = ctx->nonce[i];
1082 for (i = 0; i < 8; i++)
1083 *(iv+4+i) = req->iv[i];
1084 *((__be32 *)(iv+12)) = counter;
1085
1086 if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
1087 one_entry_in_sg = 1;
1088 scatterwalk_start(&src_sg_walk, req->src);
1089 scatterwalk_start(&assoc_sg_walk, req->assoc);
1090 src = scatterwalk_map(&src_sg_walk);
1091 assoc = scatterwalk_map(&assoc_sg_walk);
1092 dst = src;
1093 if (unlikely(req->src != req->dst)) {
1094 scatterwalk_start(&dst_sg_walk, req->dst);
1095 dst = scatterwalk_map(&dst_sg_walk);
1096 }
1097
1098 } else {
1099
1100 src = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
1101 GFP_ATOMIC);
1102 if (unlikely(!src))
1103 return -ENOMEM;
1104 assoc = (src + req->cryptlen + auth_tag_len);
1105 scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
1106 scatterwalk_map_and_copy(assoc, req->assoc, 0,
1107 req->assoclen, 0);
1108 dst = src;
1109 }
1110
1111 aesni_gcm_enc_tfm(aes_ctx, dst, src, (unsigned long)req->cryptlen, iv,
1112 ctx->hash_subkey, assoc, (unsigned long)req->assoclen, dst
1113 + ((unsigned long)req->cryptlen), auth_tag_len);
 /* The auth tag was written right after the ciphertext in dst; if a
  * bounce buffer was used, copy the result back to req->dst below. */
1117 if (one_entry_in_sg) {
1118 if (unlikely(req->src != req->dst)) {
1119 scatterwalk_unmap(dst);
1120 scatterwalk_done(&dst_sg_walk, 0, 0);
1121 }
1122 scatterwalk_unmap(src);
1123 scatterwalk_unmap(assoc);
1124 scatterwalk_done(&src_sg_walk, 0, 0);
1125 scatterwalk_done(&assoc_sg_walk, 0, 0);
1126 } else {
1127 scatterwalk_map_and_copy(dst, req->dst, 0,
1128 req->cryptlen + auth_tag_len, 1);
1129 kfree(src);
1130 }
1131 return 0;
1132}
1133
1134static int __driver_rfc4106_decrypt(struct aead_request *req)
1135{
1136 u8 one_entry_in_sg = 0;
1137 u8 *src, *dst, *assoc;
1138 unsigned long tempCipherLen = 0;
1139 __be32 counter = cpu_to_be32(1);
1140 int retval = 0;
1141 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1142 struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
1143 u32 key_len = ctx->aes_key_expanded.key_length;
1144 void *aes_ctx = &(ctx->aes_key_expanded);
1145 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
1146 u8 iv_and_authTag[32+AESNI_ALIGN];
1147 u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_and_authTag, AESNI_ALIGN);
1148 u8 *authTag = iv + 16;
1149 struct scatter_walk src_sg_walk;
1150 struct scatter_walk assoc_sg_walk;
1151 struct scatter_walk dst_sg_walk;
1152 unsigned int i;
1153
1154 if (unlikely((req->cryptlen < auth_tag_len) ||
1155 (req->assoclen != 8 && req->assoclen != 12)))
1156 return -EINVAL;
1157 if (unlikely(auth_tag_len != 8 && auth_tag_len != 12 && auth_tag_len != 16))
1158 return -EINVAL;
1159 if (unlikely(key_len != AES_KEYSIZE_128 &&
1160 key_len != AES_KEYSIZE_192 &&
1161 key_len != AES_KEYSIZE_256))
1162 return -EINVAL;
 /* The ciphertext in the request includes the auth tag; the payload
  * to decrypt is everything before it. */
1168 tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
1169
1170 for (i = 0; i < 4; i++)
1171 *(iv+i) = ctx->nonce[i];
1172 for (i = 0; i < 8; i++)
1173 *(iv+4+i) = req->iv[i];
1174 *((__be32 *)(iv+12)) = counter;
1175
1176 if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
1177 one_entry_in_sg = 1;
1178 scatterwalk_start(&src_sg_walk, req->src);
1179 scatterwalk_start(&assoc_sg_walk, req->assoc);
1180 src = scatterwalk_map(&src_sg_walk);
1181 assoc = scatterwalk_map(&assoc_sg_walk);
1182 dst = src;
1183 if (unlikely(req->src != req->dst)) {
1184 scatterwalk_start(&dst_sg_walk, req->dst);
1185 dst = scatterwalk_map(&dst_sg_walk);
1186 }
1187
1188 } else {
1189
1190 src = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
1191 if (!src)
1192 return -ENOMEM;
1193 assoc = (src + req->cryptlen);
1194 scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
1195 scatterwalk_map_and_copy(assoc, req->assoc, 0,
1196 req->assoclen, 0);
1197 dst = src;
1198 }
1199
1200 aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
1201 ctx->hash_subkey, assoc, (unsigned long)req->assoclen,
1202 authTag, auth_tag_len);
 /* Compare the computed tag with the tag received at the end of src. */
1205 retval = memcmp(src + tempCipherLen, authTag, auth_tag_len) ?
1206 -EBADMSG : 0;
1207
1208 if (one_entry_in_sg) {
1209 if (unlikely(req->src != req->dst)) {
1210 scatterwalk_unmap(dst);
1211 scatterwalk_done(&dst_sg_walk, 0, 0);
1212 }
1213 scatterwalk_unmap(src);
1214 scatterwalk_unmap(assoc);
1215 scatterwalk_done(&src_sg_walk, 0, 0);
1216 scatterwalk_done(&assoc_sg_walk, 0, 0);
1217 } else {
1218 scatterwalk_map_and_copy(dst, req->dst, 0, tempCipherLen, 1);
1219 kfree(src);
1220 }
1221 return retval;
1222}
1223
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
                          void *hash_subkey, u8 *iv,
                          struct crypto_aes_ctx *aes_ctx)
1225{
1226 u8 one_entry_in_sg = 0;
1227 u8 *src, *dst, *assoc;
1228 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1229 u32 key_len = aes_ctx->key_length;
1230 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
1231 struct scatter_walk src_sg_walk;
1232 struct scatter_walk assoc_sg_walk;
1233 struct scatter_walk dst_sg_walk;
1234
1235 if (unlikely(key_len != AES_KEYSIZE_128 &&
1236 key_len != AES_KEYSIZE_192 &&
1237 key_len != AES_KEYSIZE_256))
1238 return -EINVAL;
1239
1240 if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
1241 scatterwalk_start(&src_sg_walk, req->src);
1242 scatterwalk_start(&assoc_sg_walk, req->assoc);
1243 src = scatterwalk_map(&src_sg_walk);
1244 assoc = scatterwalk_map(&assoc_sg_walk);
1245 if (assoc + req->assoc->length != src)
1246 goto unmap;
1247 dst = src;
1248 if (unlikely(req->src != req->dst)) {
1249 scatterwalk_start(&dst_sg_walk, req->dst);
1250 dst = scatterwalk_map(&dst_sg_walk);
1251 if (assoc + req->assoc->length != dst) {
1252 scatterwalk_unmap(dst);
1253 scatterwalk_done(&dst_sg_walk, 0, 0);
1254unmap:
1255 scatterwalk_unmap(src);
1256 scatterwalk_unmap(assoc);
1257 scatterwalk_done(&src_sg_walk, 0, 0);
1258 scatterwalk_done(&assoc_sg_walk, 0, 0);
1259 goto slow;
1260 }
1261 }
1262 one_entry_in_sg = 1;
1263 } else {
1264slow:
1265
1266 src = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
1267 GFP_ATOMIC);
1268 if (unlikely(!src))
1269 return -ENOMEM;
1270 assoc = (src + req->cryptlen + auth_tag_len);
1271 scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
1272 scatterwalk_map_and_copy(assoc, req->assoc, 0,
1273 req->assoclen, 0);
1274 dst = src;
1275 }
1276
1277 aesni_gcm_enc_tfm(aes_ctx, dst, src, (unsigned long)req->cryptlen, iv,
1278 hash_subkey, assoc, (unsigned long)req->assoclen, dst
1279 + ((unsigned long)req->cryptlen), auth_tag_len);
1280
1281
1282
1283 if (one_entry_in_sg) {
1284 if (unlikely(req->src != req->dst)) {
1285 scatterwalk_unmap(dst);
1286 scatterwalk_done(&dst_sg_walk, 0, 0);
1287 }
1288 scatterwalk_unmap(src);
1289 scatterwalk_unmap(assoc);
1290 scatterwalk_done(&src_sg_walk, 0, 0);
1291 scatterwalk_done(&assoc_sg_walk, 0, 0);
1292 } else {
1293 scatterwalk_map_and_copy(dst, req->dst, 0,
1294 req->cryptlen + auth_tag_len, 1);
1295 kfree(src);
1296 }
1297 return 0;
1298}
1299
static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
                          void *hash_subkey, u8 *iv,
                          struct crypto_aes_ctx *aes_ctx)
1301{
1302 u8 one_entry_in_sg = 0;
1303 u8 auth_tag_align[16+AESNI_ALIGN];
1304 u8 *authTag = (u8 *) PTR_ALIGN((u8 *)auth_tag_align, AESNI_ALIGN);
1305 u8 *src, *dst, *assoc;
1306 int retval = 0;
1307 u32 key_len = aes_ctx->key_length;
1308 struct scatter_walk src_sg_walk;
1309 struct scatter_walk assoc_sg_walk;
1310 struct scatter_walk dst_sg_walk;
1311 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1312 unsigned long auth_tag_len = crypto_aead_authsize(tfm);
1313 unsigned long tempCipherLen = 0;
1314
1315 if (unlikely(req->cryptlen < auth_tag_len))
1316 return -EINVAL;
1317 if (unlikely(key_len != AES_KEYSIZE_128 &&
1318 key_len != AES_KEYSIZE_192 &&
1319 key_len != AES_KEYSIZE_256))
1320 return -EINVAL;
1321
1322 tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
1323
1324 if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
1325 scatterwalk_start(&src_sg_walk, req->src);
1326 scatterwalk_start(&assoc_sg_walk, req->assoc);
1327 src = scatterwalk_map(&src_sg_walk);
1328 assoc = scatterwalk_map(&assoc_sg_walk);
1329 if (assoc + req->assoc->length != src)
1330 goto unmap;
1331 dst = src;
1332 if (unlikely(req->src != req->dst)) {
1333 scatterwalk_start(&dst_sg_walk, req->dst);
1334 dst = scatterwalk_map(&dst_sg_walk);
1335 if (assoc + req->assoc->length != dst) {
1336 scatterwalk_unmap(dst);
1337 scatterwalk_done(&dst_sg_walk, 0, 0);
1338unmap:
1339 scatterwalk_unmap(src);
1340 scatterwalk_unmap(assoc);
1341 scatterwalk_done(&src_sg_walk, 0, 0);
1342 scatterwalk_done(&assoc_sg_walk, 0, 0);
1343 goto slow;
1344 }
1345 }
1346 one_entry_in_sg = 1;
1347 } else {
1348slow:
1349
1350 src = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
1351 if (!src)
1352 return -ENOMEM;
1353 assoc = (src + req->cryptlen);
1354 scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
1355 scatterwalk_map_and_copy(assoc, req->assoc, 0,
1356 req->assoclen, 0);
1357 dst = src;
1358 }
1359
1360 aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
1361 hash_subkey, assoc, (unsigned long)req->assoclen,
1362 authTag, auth_tag_len);
 /* Compare the computed tag with the tag received at the end of src. */
1365 retval = memcmp(src + tempCipherLen, authTag, auth_tag_len) ?
1366 -EBADMSG : 0;
1367
1368 if (one_entry_in_sg) {
1369 if (unlikely(req->src != req->dst)) {
1370 scatterwalk_unmap(dst);
1371 scatterwalk_done(&dst_sg_walk, 0, 0);
1372 }
1373 scatterwalk_unmap(src);
1374 scatterwalk_unmap(assoc);
1375 scatterwalk_done(&src_sg_walk, 0, 0);
1376 scatterwalk_done(&assoc_sg_walk, 0, 0);
1377 } else {
1378 scatterwalk_map_and_copy(dst, req->dst, 0, tempCipherLen, 1);
1379 kfree(src);
1380 }
1381 return retval;
1382}
1383
1384static int generic_gcmaes_init(struct crypto_tfm *tfm)
1385{
1386 struct cryptd_aead *cryptd_tfm;
1387 struct generic_gcmaes_ctx *ctx = (struct generic_gcmaes_ctx *)
1388 PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
1389 struct crypto_aead *cryptd_child;
1390 struct generic_gcmaes_ctx *child_ctx;
1391 cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
1392 CRYPTO_ALG_INTERNAL,
1393 CRYPTO_ALG_INTERNAL);
1394 if (IS_ERR(cryptd_tfm))
1395 return PTR_ERR(cryptd_tfm);
1396
1397 cryptd_child = cryptd_aead_child(cryptd_tfm);
1398 child_ctx = generic_gcmaes_ctx_get(cryptd_child);
1399 memcpy(child_ctx, ctx, sizeof(*ctx));
1400 ctx->cryptd_tfm = cryptd_tfm;
1401 tfm->crt_aead.reqsize = sizeof(struct aead_request)
1402 + crypto_aead_reqsize(&cryptd_tfm->base);
1403 return 0;
1404}
1405
1406static void generic_gcmaes_exit(struct crypto_tfm *tfm)
1407{
1408 struct generic_gcmaes_ctx *ctx =
1409 (struct generic_gcmaes_ctx *)
1410 PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
1411 if (!IS_ERR(ctx->cryptd_tfm))
1412 cryptd_free_aead(ctx->cryptd_tfm);
1413 return;
1414}
1415
1416static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
1417 unsigned int key_len)
1418{
1419 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);
1420
1421 return aes_set_key_common(crypto_aead_tfm(aead),
1422 &ctx->aes_key_expanded, key, key_len) ?:
1423 rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
1424}
1425
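/*
 * Plain gcm(aes): unlike rfc4106 there is no 4-byte salt, so the 12-byte
 * IV comes straight from the request and is padded with a 32-bit
 * big-endian block counter of 1.
 */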
1426static int __generic_gcmaes_encrypt(struct aead_request *req)
1427{
1428 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1429 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1430 void *aes_ctx = &(ctx->aes_key_expanded);
1431 u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
1432 __be32 counter = cpu_to_be32(1);
1433
1434 memcpy(iv, req->iv, 12);
1435 *((__be32 *)(iv+12)) = counter;
1436
1437 return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
1438 aes_ctx);
1439}
1440
1441static int __generic_gcmaes_decrypt(struct aead_request *req)
1442{
1443 __be32 counter = cpu_to_be32(1);
1444 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1445 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1446 void *aes_ctx = &(ctx->aes_key_expanded);
1447 u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
1448
1449 memcpy(iv, req->iv, 12);
1450 *((__be32 *)(iv+12)) = counter;
1451
1452 return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
1453 aes_ctx);
1454}
1455
1456static int generic_gcmaes_encrypt(struct aead_request *req)
1457{
1458 int ret;
1459 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1460 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1461
1462 if (!irq_fpu_usable()) {
1463 struct aead_request *cryptd_req =
1464 (struct aead_request *) aead_request_ctx(req);
1465 memcpy(cryptd_req, req, sizeof(*req));
1466 aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
1467 return crypto_aead_encrypt(cryptd_req);
1468 } else {
1469 struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
1470 kernel_fpu_begin();
1471 ret = cryptd_child->base.crt_aead.encrypt(req);
1472 kernel_fpu_end();
1473 return ret;
1474 }
1475}
1476
1477static int generic_gcmaes_decrypt(struct aead_request *req)
1478{
1479 int ret;
1480 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1481 struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1482
1483 if (!irq_fpu_usable()) {
1484 struct aead_request *cryptd_req =
1485 (struct aead_request *) aead_request_ctx(req);
1486 memcpy(cryptd_req, req, sizeof(*req));
1487 aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
1488 return crypto_aead_decrypt(cryptd_req);
1489 } else {
1490 struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
1491 kernel_fpu_begin();
1492 ret = cryptd_child->base.crt_aead.decrypt(req);
1493 kernel_fpu_end();
1494 return ret;
1495 }
1496}
1497#endif
1498
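/*
 * Algorithm registrations. The "__*"/"__driver-*" entries are internal
 * synchronous helpers (CRYPTO_ALG_INTERNAL); the user-visible
 * ecb/cbc/ctr/lrw/xts/gcm algorithms are asynchronous wrappers built on
 * the ablk helper or cryptd so they stay usable from any context.
 */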
1499static struct crypto_alg aesni_algs[] = { {
1500 .cra_name = "aes",
1501 .cra_driver_name = "aes-aesni",
1502 .cra_priority = 300,
1503 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
1504 .cra_blocksize = AES_BLOCK_SIZE,
1505 .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
1506 AESNI_ALIGN - 1,
1507 .cra_alignmask = 0,
1508 .cra_module = THIS_MODULE,
1509 .cra_u = {
1510 .cipher = {
1511 .cia_min_keysize = AES_MIN_KEY_SIZE,
1512 .cia_max_keysize = AES_MAX_KEY_SIZE,
1513 .cia_setkey = aes_set_key,
1514 .cia_encrypt = aes_encrypt,
1515 .cia_decrypt = aes_decrypt
1516 }
1517 }
1518}, {
1519 .cra_name = "__aes-aesni",
1520 .cra_driver_name = "__driver-aes-aesni",
1521 .cra_priority = 0,
1522 .cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
1523 .cra_blocksize = AES_BLOCK_SIZE,
1524 .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
1525 AESNI_ALIGN - 1,
1526 .cra_alignmask = 0,
1527 .cra_module = THIS_MODULE,
1528 .cra_u = {
1529 .cipher = {
1530 .cia_min_keysize = AES_MIN_KEY_SIZE,
1531 .cia_max_keysize = AES_MAX_KEY_SIZE,
1532 .cia_setkey = aes_set_key,
1533 .cia_encrypt = __aes_encrypt,
1534 .cia_decrypt = __aes_decrypt
1535 }
1536 }
1537}, {
1538 .cra_name = "__ecb-aes-aesni",
1539 .cra_driver_name = "__driver-ecb-aes-aesni",
1540 .cra_priority = 0,
1541 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
1542 CRYPTO_ALG_INTERNAL,
1543 .cra_blocksize = AES_BLOCK_SIZE,
1544 .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
1545 AESNI_ALIGN - 1,
1546 .cra_alignmask = 0,
1547 .cra_type = &crypto_blkcipher_type,
1548 .cra_module = THIS_MODULE,
1549 .cra_u = {
1550 .blkcipher = {
1551 .min_keysize = AES_MIN_KEY_SIZE,
1552 .max_keysize = AES_MAX_KEY_SIZE,
1553 .setkey = aes_set_key,
1554 .encrypt = ecb_encrypt,
1555 .decrypt = ecb_decrypt,
1556 },
1557 },
1558}, {
1559 .cra_name = "__cbc-aes-aesni",
1560 .cra_driver_name = "__driver-cbc-aes-aesni",
1561 .cra_priority = 0,
1562 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
1563 CRYPTO_ALG_INTERNAL,
1564 .cra_blocksize = AES_BLOCK_SIZE,
1565 .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
1566 AESNI_ALIGN - 1,
1567 .cra_alignmask = 0,
1568 .cra_type = &crypto_blkcipher_type,
1569 .cra_module = THIS_MODULE,
1570 .cra_u = {
1571 .blkcipher = {
1572 .min_keysize = AES_MIN_KEY_SIZE,
1573 .max_keysize = AES_MAX_KEY_SIZE,
1574 .setkey = aes_set_key,
1575 .encrypt = cbc_encrypt,
1576 .decrypt = cbc_decrypt,
1577 },
1578 },
1579}, {
1580 .cra_name = "ecb(aes)",
1581 .cra_driver_name = "ecb-aes-aesni",
1582 .cra_priority = 400,
1583 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1584 .cra_blocksize = AES_BLOCK_SIZE,
1585 .cra_ctxsize = sizeof(struct async_helper_ctx),
1586 .cra_alignmask = 0,
1587 .cra_type = &crypto_ablkcipher_type,
1588 .cra_module = THIS_MODULE,
1589 .cra_init = ablk_ecb_init,
1590 .cra_exit = ablk_exit,
1591 .cra_u = {
1592 .ablkcipher = {
1593 .min_keysize = AES_MIN_KEY_SIZE,
1594 .max_keysize = AES_MAX_KEY_SIZE,
1595 .setkey = ablk_set_key,
1596 .encrypt = ablk_encrypt,
1597 .decrypt = ablk_decrypt,
1598 },
1599 },
1600}, {
1601 .cra_name = "cbc(aes)",
1602 .cra_driver_name = "cbc-aes-aesni",
1603 .cra_priority = 400,
1604 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1605 .cra_blocksize = AES_BLOCK_SIZE,
1606 .cra_ctxsize = sizeof(struct async_helper_ctx),
1607 .cra_alignmask = 0,
1608 .cra_type = &crypto_ablkcipher_type,
1609 .cra_module = THIS_MODULE,
1610 .cra_init = ablk_cbc_init,
1611 .cra_exit = ablk_exit,
1612 .cra_u = {
1613 .ablkcipher = {
1614 .min_keysize = AES_MIN_KEY_SIZE,
1615 .max_keysize = AES_MAX_KEY_SIZE,
1616 .ivsize = AES_BLOCK_SIZE,
1617 .setkey = ablk_set_key,
1618 .encrypt = ablk_encrypt,
1619 .decrypt = ablk_decrypt,
1620 },
1621 },
1622#ifdef CONFIG_X86_64
1623}, {
1624 .cra_name = "__ctr-aes-aesni",
1625 .cra_driver_name = "__driver-ctr-aes-aesni",
1626 .cra_priority = 0,
1627 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
1628 CRYPTO_ALG_INTERNAL,
1629 .cra_blocksize = 1,
1630 .cra_ctxsize = sizeof(struct crypto_aes_ctx) +
1631 AESNI_ALIGN - 1,
1632 .cra_alignmask = 0,
1633 .cra_type = &crypto_blkcipher_type,
1634 .cra_module = THIS_MODULE,
1635 .cra_u = {
1636 .blkcipher = {
1637 .min_keysize = AES_MIN_KEY_SIZE,
1638 .max_keysize = AES_MAX_KEY_SIZE,
1639 .ivsize = AES_BLOCK_SIZE,
1640 .setkey = aes_set_key,
1641 .encrypt = ctr_crypt,
1642 .decrypt = ctr_crypt,
1643 },
1644 },
1645}, {
1646 .cra_name = "ctr(aes)",
1647 .cra_driver_name = "ctr-aes-aesni",
1648 .cra_priority = 400,
1649 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1650 .cra_blocksize = 1,
1651 .cra_ctxsize = sizeof(struct async_helper_ctx),
1652 .cra_alignmask = 0,
1653 .cra_type = &crypto_ablkcipher_type,
1654 .cra_module = THIS_MODULE,
1655 .cra_init = ablk_ctr_init,
1656 .cra_exit = ablk_exit,
1657 .cra_u = {
1658 .ablkcipher = {
1659 .min_keysize = AES_MIN_KEY_SIZE,
1660 .max_keysize = AES_MAX_KEY_SIZE,
1661 .ivsize = AES_BLOCK_SIZE,
1662 .setkey = ablk_set_key,
1663 .encrypt = ablk_encrypt,
   .decrypt  = ablk_decrypt,
1665 .geniv = "chainiv",
1666 },
1667 },
1668}, {
1669 .cra_name = "__gcm-aes-aesni",
1670 .cra_driver_name = "__driver-gcm-aes-aesni",
1671 .cra_priority = 0,
1672 .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_INTERNAL,
1673 .cra_blocksize = 1,
1674 .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
1675 AESNI_ALIGN,
1676 .cra_alignmask = 0,
1677 .cra_type = &crypto_aead_type,
1678 .cra_module = THIS_MODULE,
1679 .cra_u = {
1680 .aead = {
1681 .encrypt = __driver_rfc4106_encrypt,
1682 .decrypt = __driver_rfc4106_decrypt,
1683 },
1684 },
1685}, {
1686 .cra_name = "rfc4106(gcm(aes))",
1687 .cra_driver_name = "rfc4106-gcm-aesni",
1688 .cra_priority = 400,
1689 .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
1690 .cra_blocksize = 1,
1691 .cra_ctxsize = sizeof(struct aesni_rfc4106_gcm_ctx) +
1692 AESNI_ALIGN,
1693 .cra_alignmask = 0,
1694 .cra_type = &crypto_nivaead_type,
1695 .cra_module = THIS_MODULE,
1696 .cra_init = rfc4106_init,
1697 .cra_exit = rfc4106_exit,
1698 .cra_u = {
1699 .aead = {
1700 .setkey = rfc4106_set_key,
1701 .setauthsize = rfc4106_set_authsize,
1702 .encrypt = rfc4106_encrypt,
1703 .decrypt = rfc4106_decrypt,
1704 .geniv = "seqiv",
1705 .ivsize = 8,
1706 .maxauthsize = 16,
1707 },
1708 },
1709}, {
1710 .cra_name = "__generic-gcm-aes-aesni",
1711 .cra_driver_name = "__driver-generic-gcm-aes-aesni",
1712 .cra_priority = 0,
1713 .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_INTERNAL,
1714 .cra_blocksize = 1,
1715 .cra_ctxsize = sizeof(struct generic_gcmaes_ctx) +
1716 AESNI_ALIGN,
1717 .cra_alignmask = 0,
1718 .cra_type = &crypto_aead_type,
1719 .cra_module = THIS_MODULE,
1720 .cra_u = {
1721 .aead = {
1722 .encrypt = __generic_gcmaes_encrypt,
1723 .decrypt = __generic_gcmaes_decrypt,
1724 },
1725 },
1726}, {
1727 .cra_name = "gcm(aes)",
1728 .cra_driver_name = "generic-gcm-aesni",
1729 .cra_priority = 400,
1730 .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
1731 .cra_blocksize = 1,
1732 .cra_ctxsize = sizeof(struct generic_gcmaes_ctx) +
1733 AESNI_ALIGN,
1734 .cra_alignmask = 0,
1735 .cra_type = &crypto_nivaead_type,
1736 .cra_module = THIS_MODULE,
1737 .cra_init = generic_gcmaes_init,
1738 .cra_exit = generic_gcmaes_exit,
1739 .cra_u = {
1740 .aead = {
1741 .setkey = generic_gcmaes_set_key,
1742 .setauthsize = generic_gcmaes_set_authsize,
1743 .encrypt = generic_gcmaes_encrypt,
1744 .decrypt = generic_gcmaes_decrypt,
1745 .geniv = "seqiv",
1746 .ivsize = 16,
1747 .maxauthsize = 16,
1748 },
1749 },
1750#endif
1751#ifdef HAS_PCBC
1752}, {
1753 .cra_name = "pcbc(aes)",
1754 .cra_driver_name = "pcbc-aes-aesni",
1755 .cra_priority = 400,
1756 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1757 .cra_blocksize = AES_BLOCK_SIZE,
1758 .cra_ctxsize = sizeof(struct async_helper_ctx),
1759 .cra_alignmask = 0,
1760 .cra_type = &crypto_ablkcipher_type,
1761 .cra_module = THIS_MODULE,
1762 .cra_init = ablk_pcbc_init,
1763 .cra_exit = ablk_exit,
1764 .cra_u = {
1765 .ablkcipher = {
1766 .min_keysize = AES_MIN_KEY_SIZE,
1767 .max_keysize = AES_MAX_KEY_SIZE,
1768 .ivsize = AES_BLOCK_SIZE,
1769 .setkey = ablk_set_key,
1770 .encrypt = ablk_encrypt,
1771 .decrypt = ablk_decrypt,
1772 },
1773 },
1774#endif
1775}, {
1776 .cra_name = "__lrw-aes-aesni",
1777 .cra_driver_name = "__driver-lrw-aes-aesni",
1778 .cra_priority = 0,
1779 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
1780 CRYPTO_ALG_INTERNAL,
1781 .cra_blocksize = AES_BLOCK_SIZE,
1782 .cra_ctxsize = sizeof(struct aesni_lrw_ctx),
1783 .cra_alignmask = 0,
1784 .cra_type = &crypto_blkcipher_type,
1785 .cra_module = THIS_MODULE,
1786 .cra_exit = lrw_aesni_exit_tfm,
1787 .cra_u = {
1788 .blkcipher = {
1789 .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
1790 .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
1791 .ivsize = AES_BLOCK_SIZE,
1792 .setkey = lrw_aesni_setkey,
1793 .encrypt = lrw_encrypt,
1794 .decrypt = lrw_decrypt,
1795 },
1796 },
1797}, {
1798 .cra_name = "__xts-aes-aesni",
1799 .cra_driver_name = "__driver-xts-aes-aesni",
1800 .cra_priority = 0,
1801 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
1802 CRYPTO_ALG_INTERNAL,
1803 .cra_blocksize = AES_BLOCK_SIZE,
1804 .cra_ctxsize = sizeof(struct aesni_xts_ctx),
1805 .cra_alignmask = 0,
1806 .cra_type = &crypto_blkcipher_type,
1807 .cra_module = THIS_MODULE,
1808 .cra_u = {
1809 .blkcipher = {
1810 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1811 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1812 .ivsize = AES_BLOCK_SIZE,
1813 .setkey = xts_aesni_setkey,
1814 .encrypt = xts_encrypt,
1815 .decrypt = xts_decrypt,
1816 },
1817 },
1818}, {
1819 .cra_name = "lrw(aes)",
1820 .cra_driver_name = "lrw-aes-aesni",
1821 .cra_priority = 400,
1822 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1823 .cra_blocksize = AES_BLOCK_SIZE,
1824 .cra_ctxsize = sizeof(struct async_helper_ctx),
1825 .cra_alignmask = 0,
1826 .cra_type = &crypto_ablkcipher_type,
1827 .cra_module = THIS_MODULE,
1828 .cra_init = ablk_init,
1829 .cra_exit = ablk_exit,
1830 .cra_u = {
1831 .ablkcipher = {
1832 .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
1833 .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
1834 .ivsize = AES_BLOCK_SIZE,
1835 .setkey = ablk_set_key,
1836 .encrypt = ablk_encrypt,
1837 .decrypt = ablk_decrypt,
1838 },
1839 },
1840}, {
1841 .cra_name = "xts(aes)",
1842 .cra_driver_name = "xts-aes-aesni",
1843 .cra_priority = 400,
1844 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
1845 .cra_blocksize = AES_BLOCK_SIZE,
1846 .cra_ctxsize = sizeof(struct async_helper_ctx),
1847 .cra_alignmask = 0,
1848 .cra_type = &crypto_ablkcipher_type,
1849 .cra_module = THIS_MODULE,
1850 .cra_init = ablk_init,
1851 .cra_exit = ablk_exit,
1852 .cra_u = {
1853 .ablkcipher = {
1854 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1855 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1856 .ivsize = AES_BLOCK_SIZE,
1857 .setkey = ablk_set_key,
1858 .encrypt = ablk_encrypt,
1859 .decrypt = ablk_decrypt,
1860 },
1861 },
1862} };
1863
1864
1865static const struct x86_cpu_id aesni_cpu_id[] = {
1866 X86_FEATURE_MATCH(X86_FEATURE_AES),
1867 {}
1868};
1869MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
1870
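/*
 * Module init: bail out on CPUs without the AES-NI feature flag, then pick
 * the best GCM (SSE/AVX/AVX2) and CTR (by8 AVX) implementations that the
 * build and the CPU support before registering the algorithms.
 */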
1871static int __init aesni_init(void)
1872{
1873 int err;
1874
1875 if (!x86_match_cpu(aesni_cpu_id))
1876 return -ENODEV;
1877#ifdef CONFIG_X86_64
1878#ifdef CONFIG_AS_AVX2
1879 if (boot_cpu_has(X86_FEATURE_AVX2)) {
1880 pr_info("AVX2 version of gcm_enc/dec engaged.\n");
1881 aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
1882 aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
1883 } else
1884#endif
1885#ifdef CONFIG_AS_AVX
1886 if (boot_cpu_has(X86_FEATURE_AVX)) {
1887 pr_info("AVX version of gcm_enc/dec engaged.\n");
1888 aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
1889 aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
1890 } else
1891#endif
1892 {
1893 pr_info("SSE version of gcm_enc/dec engaged.\n");
1894 aesni_gcm_enc_tfm = aesni_gcm_enc;
1895 aesni_gcm_dec_tfm = aesni_gcm_dec;
1896 }
1897 aesni_ctr_enc_tfm = aesni_ctr_enc;
1898#ifdef CONFIG_AS_AVX
1899 if (cpu_has_avx) {
 /* Prefer the AVX "by8" implementation of CTR for better throughput. */
1901 aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
1902 pr_info("AES CTR mode by8 optimization enabled\n");
1903 }
1904#endif
1905#endif
1906
1907 err = crypto_fpu_init();
1908 if (err)
1909 return err;
1910
1911 return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1912}
1913
1914static void __exit aesni_exit(void)
1915{
1916 crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
1917
1918 crypto_fpu_exit();
1919}
1920
1921module_init(aesni_init);
1922module_exit(aesni_exit);
1923
1924MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
1925MODULE_LICENSE("GPL");
1926MODULE_ALIAS_CRYPTO("aes");