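/*
 * Glue code binding the x86 AES-NI assembly implementation to the kernel
 * crypto API: a bare "aes" cipher, ECB/CBC/CTR/XTS skciphers and the
 * GCM-based AEAD modes. (Descriptive header; the original file comment was
 * not preserved.)
 */
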
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/crypto/aes.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
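
/*
 * The AES-NI assembly expects key schedules and related data on 16-byte
 * boundaries; the *_EXTRA/_SIZE macros pad the tfm context so the pointer
 * can be rounded up when the crypto API guarantees less alignment.
 */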
#define AESNI_ALIGN		16
#define AESNI_ALIGN_ATTR	__attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK		(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA	((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE	(sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE	(sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
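
/*
 * Key material for the RFC4106 and plain GCM modes lives in the aead tfm
 * context; the hash subkey and the expanded AES key must stay 16-byte
 * aligned for the assembly routines.
 */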
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16
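
/*
 * Per-request GCM working state shared with the assembly routines; the
 * layout must match what the assembly code expects.
 */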
struct gcm_context_data {
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};
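
/* Routines implemented in the accompanying assembly (aesni-intel_asm.S). */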
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
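
/*
 * Minimum data sizes below which gcmaes_crypt_by_sg() falls back from the
 * AVX2 and AVX implementations to the next simpler one.
 */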
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

#ifdef CONFIG_X86_64
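
/*
 * CTR entry point, chosen at module init: the plain AES-NI version or, when
 * AVX is available, the "by8" variant declared under CONFIG_AS_AVX below.
 */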
static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
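
/*
 * asmlinkage void aesni_gcm_enc()
 * void *ctx, AES key schedule, starting on a 16-byte boundary.
 * struct gcm_context_data *gdata, per-request GCM working state.
 * u8 *out, ciphertext output; encrypting in place is allowed.
 * const u8 *in, plaintext input.
 * unsigned long plaintext_len, length of the data in bytes.
 * u8 *iv, pre-counter block j0: the 12-byte IV concatenated with 0x00000001.
 * u8 *hash_subkey, the GHASH subkey, starting on a 16-byte boundary.
 * const u8 *aad, additional authentication data (AAD).
 * unsigned long aad_len, length of the AAD in bytes.
 * u8 *auth_tag, authentication tag output.
 * unsigned long auth_tag_len, tag length in bytes: 16, 12 or 8.
 */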
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
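
/*
 * asmlinkage void aesni_gcm_dec()
 * Mirror image of aesni_gcm_enc(): decrypts ciphertext_len bytes (the tag is
 * not included) into the plaintext output; decrypting in place is allowed.
 */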
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
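
/* Scatter/gather interface: explicit init/update/finalize steps. */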
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
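
/*
 * Dispatch table so the GCM entry points can be switched between the SSE,
 * AVX and AVX2 implementations at module init.
 */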
static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};

#ifdef CONFIG_AS_AVX
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
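
/*
 * AVX (gen2) variants of the GCM routines; same contracts as the SSE
 * versions above.
 */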
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};

#endif

#ifdef CONFIG_AS_AVX2
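
/* AVX2 (gen4) variants of the GCM routines. */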
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);

asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};

#endif

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
#endif
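
/*
 * Round the raw context pointer up to AESNI_ALIGN when the crypto API's own
 * guarantee (crypto_tfm_ctx_alignment()) is weaker.
 */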
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
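
	/*
	 * Expand the key with the AES-NI instructions when the FPU is usable;
	 * otherwise fall back to the generic C key expansion.
	 */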
	if (!crypto_simd_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
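
/*
 * Single-block operations for the bare "aes" cipher; when the FPU cannot be
 * used they fall back to the non-SIMD x86 implementation.
 */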
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}
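
/*
 * The skcipher walks below run entirely under kernel_fpu_begin(), so they
 * are started with atomic == true; each pass handles a whole number of AES
 * blocks (nbytes & AES_BLOCK_MASK) and leaves the remainder to the walk.
 */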
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

#ifdef CONFIG_X86_64
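/*
 * Handle the final partial block of a CTR request: encrypt the current
 * counter block and XOR the resulting keystream into the tail bytes.
 */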
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

#ifdef CONFIG_AS_AVX
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
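	/*
	 * Based on the key length, pick the matching "by8" CTR variant for
	 * better performance; aes_set_key_common() guarantees that the key
	 * length is one of 128, 192 or 256 bits.
	 */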
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
#endif

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}
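
/*
 * An XTS key is twice the AES key size: the first half is the data
 * encryption key, the second half the tweak key.
 */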
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}
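
/* XTS glue, built on the common glue_helper framework. */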
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;
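
	/*
	 * The GHASH subkey H is the encryption of the all-zero block, so
	 * clear the buffer and encrypt it in place.
	 */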
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}
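
/*
 * The last four bytes of an RFC4106 key are the nonce (salt); the rest is
 * the AES key proper.
 */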
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
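
/*
 * The Integrity Check Value (authentication tag) length: RFC4106 permits
 * 8, 12 or 16 bytes, while plain GCM also allows the shorter sizes handled
 * by the generic variant below.
 */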
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
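
/*
 * Common GCM path: walk the source and destination scatterlists and feed
 * them to the init/update/finalize implementation selected at module init,
 * dropping back to a simpler variant for requests too short to amortize the
 * AVX/AVX2 setup cost.
 */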
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif
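
	/* Linearize assoc, if not already linear */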
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}

	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];
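
		/* Copy out the original authentication tag */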
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}
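
	/* Copy the generated tag into place behind the ciphertext */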
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}

static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);
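
	/*
	 * rfc4106 with 64-bit extended sequence numbers requires the AAD to
	 * be 16 or 20 bytes long, hence the check below.
	 */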
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
#endif

static struct crypto_alg aesni_cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
};
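
/*
 * Internal (__-prefixed) algorithms; users reach them through the simd
 * wrappers registered in aesni_init().
 */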
static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= 1,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 401,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
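
/*
 * Build the pre-counter block j0 from the 12-byte IV with an initial counter
 * value of one, as GCM requires, then run the common GCM path.
 */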
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static struct aead_alg aesni_aeads[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__rfc4106(gcm(aes))",
		.cra_driver_name	= "__rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm(aes)",
		.cra_driver_name	= "__generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aeads[0];
#endif

static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];

static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);

static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
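		/* optimize performance of ctr mode encryption transform */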
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_alg(&aesni_cipher_alg);
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_cipher;

	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_cipher:
	crypto_unregister_alg(&aesni_cipher_alg);
	return err;
}

static void __exit aesni_exit(void)
{
	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_alg(&aesni_cipher_alg);
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");