1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49#include "compat.h"
50
51#include "regs.h"
52#include "intern.h"
53#include "desc_constr.h"
54#include "jr.h"
55#include "error.h"
56#include "sg_sw_sec4.h"
57#include "key_gen.h"
58#include "caamalg_desc.h"
59#include <crypto/engine.h>
60
61
62
63
/* crypto algorithm registration priority */
#define CAAM_CRA_PRIORITY 3000

/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
 CTR_RFC3686_NONCE_SIZE + \
 SHA512_DIGEST_SIZE * 2)

/* job-descriptor I/O overhead added on top of the shared descriptor */
#define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

/* largest shared descriptor that still fits the 64-word descriptor buffer */
#define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
80
/*
 * caam_alg_entry - per-algorithm CAAM parameters used at tfm init time
 * @class1_alg_type: cipher (class 1) algorithm/mode selector bits
 * @class2_alg_type: auth (class 2) algorithm/mode selector bits
 * @rfc3686: true for RFC3686 (CTR with nonce) variants
 * @geniv: true when the driver generates the IV (givencrypt)
 * @nodkp: true when the Derived Key Protocol must not be used
 */
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};
88
/* AEAD algorithm template plus its CAAM parameters and registration state */
struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;	/* set once registered with the crypto API */
};
94
/* skcipher algorithm template plus its CAAM parameters and registration state */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;	/* set once registered with the crypto API */
};
100
101
102
103
/*
 * per-session (transform) context
 */
struct caam_ctx {
	struct crypto_engine_ctx enginectx;	/* crypto engine hooks */
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u8 key[CAAM_MAX_KEY_SIZE];		/* (split) key material */
	dma_addr_t sh_desc_enc_dma;		/* bus address of sh_desc_enc */
	dma_addr_t sh_desc_dec_dma;		/* bus address of sh_desc_dec */
	dma_addr_t key_dma;			/* bus address of key */
	enum dma_data_direction dir;		/* DMA direction for syncs */
	struct device *jrdev;			/* job ring device */
	struct alginfo adata;			/* authentication alg info */
	struct alginfo cdata;			/* cipher alg info */
	unsigned int authsize;			/* ICV / tag size in bytes */
};
118
/* per-request context: extended descriptor kept for the completion path */
struct caam_skcipher_req_ctx {
	struct skcipher_edesc *edesc;
};
122
/* per-request context: extended descriptor kept for the completion path */
struct caam_aead_req_ctx {
	struct aead_edesc *edesc;
};
126
/*
 * Build the shared descriptors for authentication-only ("null cipher")
 * AEAD transforms and push them to the device.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must all fit into the 64-word Descriptor h/w Buffer;
	 * inline the key only if there is room left for it.
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
176
/*
 * (Re)build the encrypt/decrypt (and optionally givencrypt) shared
 * descriptors for an authenc-style AEAD and sync them to the device.
 * Returns 0 or -EINVAL if a descriptor cannot fit the h/w buffer.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* nothing to build until the authsize is known */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 * The nonce is stored right after the cipher key.
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
306
307static int aead_setauthsize(struct crypto_aead *authenc,
308 unsigned int authsize)
309{
310 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
311
312 ctx->authsize = authsize;
313 aead_set_sh_desc(authenc);
314
315 return 0;
316}
317
/*
 * Build the GCM encrypt/decrypt shared descriptors and sync them
 * to the device. No-op until both key and authsize are set.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer;
	 * inline the key only if there is room left for it.
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
367
368static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
369{
370 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
371 int err;
372
373 err = crypto_gcm_check_authsize(authsize);
374 if (err)
375 return err;
376
377 ctx->authsize = authsize;
378 gcm_set_sh_desc(authenc);
379
380 return 0;
381}
382
/*
 * Build the RFC4106 (GCM for IPsec ESP) shared descriptors and sync
 * them to the device. No-op until both key and authsize are set.
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer;
	 * inline the key only if there is room left for it.
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
434
435static int rfc4106_setauthsize(struct crypto_aead *authenc,
436 unsigned int authsize)
437{
438 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
439 int err;
440
441 err = crypto_rfc4106_check_authsize(authsize);
442 if (err)
443 return err;
444
445 ctx->authsize = authsize;
446 rfc4106_set_sh_desc(authenc);
447
448 return 0;
449}
450
/*
 * Build the RFC4543 (GMAC) shared descriptors and sync them to the
 * device. No-op until both key and authsize are set.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer;
	 * inline the key only if there is room left for it.
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
502
503static int rfc4543_setauthsize(struct crypto_aead *authenc,
504 unsigned int authsize)
505{
506 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
507
508 if (authsize != 16)
509 return -EINVAL;
510
511 ctx->authsize = authsize;
512 rfc4543_set_sh_desc(authenc);
513
514 return 0;
515}
516
/*
 * Build the ChaCha20-Poly1305 encrypt/decrypt shared descriptors and
 * sync them to the device. No inline-key fit check is performed here;
 * these descriptors are small enough for the h/w buffer.
 */
static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/* encrypt descriptor (3rd argument: encap = true) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* decrypt descriptor (encap = false) */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
541
542static int chachapoly_setauthsize(struct crypto_aead *aead,
543 unsigned int authsize)
544{
545 struct caam_ctx *ctx = crypto_aead_ctx(aead);
546
547 if (authsize != POLY1305_DIGEST_SIZE)
548 return -EINVAL;
549
550 ctx->authsize = authsize;
551 return chachapoly_set_sh_desc(aead);
552}
553
554static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
555 unsigned int keylen)
556{
557 struct caam_ctx *ctx = crypto_aead_ctx(aead);
558 unsigned int ivsize = crypto_aead_ivsize(aead);
559 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
560
561 if (keylen != CHACHA_KEY_SIZE + saltlen)
562 return -EINVAL;
563
564 ctx->cdata.key_virt = key;
565 ctx->cdata.keylen = keylen - saltlen;
566
567 return chachapoly_set_sh_desc(aead);
568}
569
/*
 * Split the authenc key blob into auth + cipher keys, derive the split
 * (IPAD/OPAD) key when needed and push the result to the device.
 * Returns 0 on success or -EINVAL on a malformed/oversized key.
 */
static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported (Era 6+), the h/w derives the split key in
	 * the shared descriptor; just store the raw auth key followed by
	 * the encryption key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* older h/w: compute the split key via a job on the job ring */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
			     ctx->adata.keylen_pad + keys.enckeylen, 1);

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
633
634static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
635 unsigned int keylen)
636{
637 struct crypto_authenc_keys keys;
638 int err;
639
640 err = crypto_authenc_extractkeys(&keys, key, keylen);
641 if (unlikely(err))
642 return err;
643
644 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
645 aead_setkey(aead, key, keylen);
646
647 memzero_explicit(&keys, sizeof(keys));
648 return err;
649}
650
651static int gcm_setkey(struct crypto_aead *aead,
652 const u8 *key, unsigned int keylen)
653{
654 struct caam_ctx *ctx = crypto_aead_ctx(aead);
655 struct device *jrdev = ctx->jrdev;
656 int err;
657
658 err = aes_check_keylen(keylen);
659 if (err)
660 return err;
661
662 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
663 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
664
665 memcpy(ctx->key, key, keylen);
666 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
667 ctx->cdata.keylen = keylen;
668
669 return gcm_set_sh_desc(aead);
670}
671
672static int rfc4106_setkey(struct crypto_aead *aead,
673 const u8 *key, unsigned int keylen)
674{
675 struct caam_ctx *ctx = crypto_aead_ctx(aead);
676 struct device *jrdev = ctx->jrdev;
677 int err;
678
679 err = aes_check_keylen(keylen - 4);
680 if (err)
681 return err;
682
683 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
684 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
685
686 memcpy(ctx->key, key, keylen);
687
688
689
690
691
692 ctx->cdata.keylen = keylen - 4;
693 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
694 ctx->dir);
695 return rfc4106_set_sh_desc(aead);
696}
697
698static int rfc4543_setkey(struct crypto_aead *aead,
699 const u8 *key, unsigned int keylen)
700{
701 struct caam_ctx *ctx = crypto_aead_ctx(aead);
702 struct device *jrdev = ctx->jrdev;
703 int err;
704
705 err = aes_check_keylen(keylen - 4);
706 if (err)
707 return err;
708
709 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
710 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
711
712 memcpy(ctx->key, key, keylen);
713
714
715
716
717
718 ctx->cdata.keylen = keylen - 4;
719 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
720 ctx->dir);
721 return rfc4543_set_sh_desc(aead);
722}
723
/*
 * Common skcipher setkey: the key is always inlined in the shared
 * descriptors, which are rebuilt and synced to the device.
 * @ctx1_iv_off: byte offset of the IV within the CONTEXT1 register.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
759
760static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
761 const u8 *key, unsigned int keylen)
762{
763 int err;
764
765 err = aes_check_keylen(keylen);
766 if (err)
767 return err;
768
769 return skcipher_setkey(skcipher, key, keylen, 0);
770}
771
772static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
773 const u8 *key, unsigned int keylen)
774{
775 u32 ctx1_iv_off;
776 int err;
777
778
779
780
781
782
783 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
784 keylen -= CTR_RFC3686_NONCE_SIZE;
785
786 err = aes_check_keylen(keylen);
787 if (err)
788 return err;
789
790 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
791}
792
793static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
794 const u8 *key, unsigned int keylen)
795{
796 u32 ctx1_iv_off;
797 int err;
798
799
800
801
802
803
804 ctx1_iv_off = 16;
805
806 err = aes_check_keylen(keylen);
807 if (err)
808 return err;
809
810 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
811}
812
813static int arc4_skcipher_setkey(struct crypto_skcipher *skcipher,
814 const u8 *key, unsigned int keylen)
815{
816 return skcipher_setkey(skcipher, key, keylen, 0);
817}
818
819static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
820 const u8 *key, unsigned int keylen)
821{
822 return verify_skcipher_des_key(skcipher, key) ?:
823 skcipher_setkey(skcipher, key, keylen, 0);
824}
825
826static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
827 const u8 *key, unsigned int keylen)
828{
829 return verify_skcipher_des3_key(skcipher, key) ?:
830 skcipher_setkey(skcipher, key, keylen, 0);
831}
832
/*
 * AES-XTS setkey: the key blob is two concatenated AES keys (data key +
 * tweak key), so only 2*128-bit and 2*256-bit lengths are valid.
 */
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
863
864
865
866
867
868
869
870
871
872
873
874
875
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
914
915static void caam_unmap(struct device *dev, struct scatterlist *src,
916 struct scatterlist *dst, int src_nents,
917 int dst_nents,
918 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
919 int sec4_sg_bytes)
920{
921 if (dst != src) {
922 if (src_nents)
923 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
924 if (dst_nents)
925 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
926 } else {
927 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
928 }
929
930 if (iv_dma)
931 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
932 if (sec4_sg_bytes)
933 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
934 DMA_TO_DEVICE);
935}
936
937static void aead_unmap(struct device *dev,
938 struct aead_edesc *edesc,
939 struct aead_request *req)
940{
941 caam_unmap(dev, req->src, req->dst,
942 edesc->src_nents, edesc->dst_nents, 0, 0,
943 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
944}
945
946static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
947 struct skcipher_request *req)
948{
949 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
950 int ivsize = crypto_skcipher_ivsize(skcipher);
951
952 caam_unmap(dev, req->src, req->dst,
953 edesc->src_nents, edesc->dst_nents,
954 edesc->iv_dma, ivsize,
955 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
956}
957
/*
 * Job ring completion callback for AEAD requests: translate the h/w
 * status, release DMA resources and complete the request either
 * directly or through the crypto engine (backlogged requests).
 */
static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
			    void *context)
{
	struct aead_request *req = context;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	struct aead_edesc *edesc;
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	/* cache the flag: edesc is freed below, before it is needed */
	has_bklog = edesc->bklog;

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		aead_request_complete(req, ecode);
	else
		crypto_finalize_aead_request(jrp->engine, req, ecode);
}
989
/*
 * Job ring completion callback for skcipher requests: translate the
 * h/w status, copy the output IV back to the request, release DMA
 * resources and complete the request (directly or via crypto engine).
 */
static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
				void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	/* cache the flag: edesc is freed below, before it is needed */
	has_bklog = edesc->bklog;
	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * The output IV is stored right after the s/w link table.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
		       ivsize);

		print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
}
1040
1041
1042
1043
/*
 * Fill in the common part of an AEAD job descriptor: header pointing
 * to the shared descriptor, SEQ IN PTR and SEQ OUT PTR commands.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* contiguous input: point directly at the (single) segment */
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		/* scattered input: point at the h/w link table */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* in-place by default; overridden below for distinct dst */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			/* dst entries follow the src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	/* output grows by the ICV on encrypt, shrinks by it on decrypt */
	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
1104
/* Build the GCM-specific part of the job descriptor on top of the
 * common AEAD part (assoclen in REG3, IV load, optional salt).
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* zero-length input: mark the IV as the last FIFO entry */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);

	/* Append Salt (kept right after the AES key for non-generic GCM) */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);

	/* Append IV */
	append_data(desc, req->iv, ivsize);

	/* End of blank commands */
}
1134
/* Build the ChaCha20-Poly1305-specific part of the job descriptor. */
static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt.
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation.
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}
1168
/* Build the authenc-specific part of the job descriptor. */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether the MATH command
	 * supports DPOVRD as destination (Era 3+).
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	/* load the IV unless the descriptor generates it itself (geniv) */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
1217
1218
1219
1220
/*
 * Fill in the skcipher job descriptor: header pointing to the shared
 * descriptor plus SEQ IN/OUT PTR commands covering IV + payload.
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen%d\n",
		(int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg("src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		/* IV and/or scattered input: go through the h/w link table */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		/* in-place: reuse the input table, skipping the IV entry */
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		/* dst entries follow the src (+IV) entries in the table */
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
}
1273
1274
1275
1276
/*
 * aead_edesc_alloc - allocate and DMA-map an AEAD extended descriptor
 * @req: aead request
 * @desc_bytes: space to reserve for the HW job descriptor
 * @all_contig_ptr: set true when the mapped source is a single DMA segment
 * @encrypt: true for encryption, false for decryption
 *
 * Maps req->src/req->dst for DMA, sizes and fills the SEC4 scatter/gather
 * table that follows the job descriptor in the allocation, and stashes the
 * edesc in the per-request context.  Returns the edesc or ERR_PTR() on
 * failure; on failure all mappings done here are undone.
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	/* only sleep for memory if the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		/*
		 * Out-of-place: input spans assoclen + cryptlen; output
		 * additionally gains the ICV on encrypt or sheds it on
		 * decrypt.
		 */
		src_len = req->assoclen + req->cryptlen;
		dst_len = src_len + (encrypt ? authsize : (-authsize));

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		/* In-place: on encrypt the ICV is appended in req->src */
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ? authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		/* In-place: one bidirectional mapping covers both I/O */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				/* unwind the source mapping done above */
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * A single mapped source segment is inlined into the job descriptor,
	 * so it needs no S/G entries; multi-segment sides get table entries,
	 * and the table length is padded via pad_sg_nents() (HW alignment
	 * requirement on the S/G table size).
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc, the HW job descriptor and the
	 * S/G table in one DMA-able chunk
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	/* S/G table lives right after the job descriptor space */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;

	rctx->edesc = edesc;

	/* source is "all contiguous" when it maps to a single DMA segment */
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, src_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		/* destination entries follow the source entries */
		sg_to_sec4_sg_last(req->dst, dst_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* nothing to map if both sides were inlined */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
1417
1418static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1419{
1420 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1421 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1422 struct aead_edesc *edesc = rctx->edesc;
1423 u32 *desc = edesc->hw_desc;
1424 int ret;
1425
1426
1427
1428
1429
1430
1431 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1432 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1433 req);
1434 else
1435 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1436
1437 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1438 aead_unmap(jrdev, edesc, req);
1439 kfree(rctx->edesc);
1440 }
1441
1442 return ret;
1443}
1444
1445static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1446{
1447 struct aead_edesc *edesc;
1448 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1449 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1450 struct device *jrdev = ctx->jrdev;
1451 bool all_contig;
1452 u32 *desc;
1453
1454 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1455 encrypt);
1456 if (IS_ERR(edesc))
1457 return PTR_ERR(edesc);
1458
1459 desc = edesc->hw_desc;
1460
1461 init_chachapoly_job(req, edesc, all_contig, encrypt);
1462 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1463 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1464 1);
1465
1466 return aead_enqueue_req(jrdev, req);
1467}
1468
/* ChaCha20-Poly1305 encrypt entry point */
static int chachapoly_encrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, true);
}
1473
/* ChaCha20-Poly1305 decrypt entry point */
static int chachapoly_decrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, false);
}
1478
1479static inline int aead_crypt(struct aead_request *req, bool encrypt)
1480{
1481 struct aead_edesc *edesc;
1482 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1483 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1484 struct device *jrdev = ctx->jrdev;
1485 bool all_contig;
1486
1487
1488 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1489 &all_contig, encrypt);
1490 if (IS_ERR(edesc))
1491 return PTR_ERR(edesc);
1492
1493
1494 init_authenc_job(req, edesc, all_contig, encrypt);
1495
1496 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1497 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1498 desc_bytes(edesc->hw_desc), 1);
1499
1500 return aead_enqueue_req(jrdev, req);
1501}
1502
/* authenc encrypt entry point */
static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}
1507
/* authenc decrypt entry point */
static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}
1512
1513static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1514{
1515 struct aead_request *req = aead_request_cast(areq);
1516 struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1517 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1518 u32 *desc = rctx->edesc->hw_desc;
1519 int ret;
1520
1521 rctx->edesc->bklog = true;
1522
1523 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1524
1525 if (ret != -EINPROGRESS) {
1526 aead_unmap(ctx->jrdev, rctx->edesc, req);
1527 kfree(rctx->edesc);
1528 } else {
1529 ret = 0;
1530 }
1531
1532 return ret;
1533}
1534
1535static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1536{
1537 struct aead_edesc *edesc;
1538 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1539 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1540 struct device *jrdev = ctx->jrdev;
1541 bool all_contig;
1542
1543
1544 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
1545 encrypt);
1546 if (IS_ERR(edesc))
1547 return PTR_ERR(edesc);
1548
1549
1550 init_gcm_job(req, edesc, all_contig, encrypt);
1551
1552 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1553 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1554 desc_bytes(edesc->hw_desc), 1);
1555
1556 return aead_enqueue_req(jrdev, req);
1557}
1558
/* AES-GCM encrypt entry point */
static int gcm_encrypt(struct aead_request *req)
{
	return gcm_crypt(req, true);
}
1563
/* AES-GCM decrypt entry point */
static int gcm_decrypt(struct aead_request *req)
{
	return gcm_crypt(req, false);
}
1568
/* rfc4106/rfc4543 encrypt: validate IPsec assoclen, then run GCM */
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
}
1573
/* rfc4106/rfc4543 decrypt: validate IPsec assoclen, then run GCM */
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
}
1578
1579
1580
1581
/*
 * skcipher_edesc_alloc - allocate and DMA-map a skcipher extended descriptor
 * @req: skcipher request
 * @desc_bytes: space to reserve for the HW job descriptor
 *
 * Maps req->src/req->dst, copies and maps the IV, and builds the SEC4 S/G
 * table.  Table layout: [IV] + src entries, then dst entries + [IV]; the
 * trailing IV entry lets the HW write back the output IV for chaining.
 * Returns the edesc or ERR_PTR() on failure.
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct device *jrdev = ctx->jrdev;
	/* only sleep for memory if the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		/* In-place: one bidirectional mapping covers both I/O */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			/* unwind the source mapping done above */
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Input side: no table needed when there is no IV and the source is
	 * one DMA segment (it gets inlined); otherwise IV entry + src
	 * entries.
	 */
	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0;
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	/* destination entries start right after the input side */
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Output side: when an IV is present (or dst is multi-segment) the
	 * output gets its own run of entries, ending with an IV entry so the
	 * HW can write the output IV back.  pad_sg_nents() rounds each run
	 * up to the HW's S/G alignment.  In-place operation reuses the input
	 * run, so only the IV entry is added on top of the padded input run.
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc, the HW job descriptor, the S/G
	 * table and the IV copy in one DMA-able chunk
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* S/G table lives right after the job descriptor space */
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	rctx->edesc = edesc;

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		/* IV copy is the last ivsize bytes of the allocation */
		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		/* bidirectional: HW reads input IV and writes output IV */
		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		/* first table entry is the input IV */
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	/* output IV entry follows the destination entries */
	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	/* mark the final entry so the HW stops walking the table */
	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents - 1 + !!ivsize);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}
1741
1742static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1743{
1744 struct skcipher_request *req = skcipher_request_cast(areq);
1745 struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
1746 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1747 u32 *desc = rctx->edesc->hw_desc;
1748 int ret;
1749
1750 rctx->edesc->bklog = true;
1751
1752 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1753
1754 if (ret != -EINPROGRESS) {
1755 skcipher_unmap(ctx->jrdev, rctx->edesc, req);
1756 kfree(rctx->edesc);
1757 } else {
1758 ret = 0;
1759 }
1760
1761 return ret;
1762}
1763
1764static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1765{
1766 struct skcipher_edesc *edesc;
1767 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1768 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1769 struct device *jrdev = ctx->jrdev;
1770 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1771 u32 *desc;
1772 int ret = 0;
1773
1774 if (!req->cryptlen)
1775 return 0;
1776
1777
1778 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1779 if (IS_ERR(edesc))
1780 return PTR_ERR(edesc);
1781
1782
1783 init_skcipher_job(req, edesc, encrypt);
1784
1785 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1786 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1787 desc_bytes(edesc->hw_desc), 1);
1788
1789 desc = edesc->hw_desc;
1790
1791
1792
1793
1794
1795 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1796 ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1797 req);
1798 else
1799 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1800
1801 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1802 skcipher_unmap(jrdev, edesc, req);
1803 kfree(edesc);
1804 }
1805
1806 return ret;
1807}
1808
/* skcipher encrypt entry point */
static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}
1813
/* skcipher decrypt entry point */
static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}
1818
/*
 * Symmetric-cipher algorithm templates registered by this driver.
 * Each entry pairs the kernel skcipher_alg definition with the CAAM
 * class-1 operation (algorithm selector | AAI mode) used when building
 * shared descriptors.
 */
static struct caam_skcipher_alg driver_algs[] = {
	/* CBC modes (IV = one cipher block) */
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	/* CTR modes: stream ciphers, hence cra_blocksize = 1 */
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = ctr_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* RFC3686: CTR with a per-key nonce appended to the key */
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc3686_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		/* XTS: two AES keys, hence doubled keysize bounds */
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	/* ECB modes: no IV */
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(arc4)",
				.cra_driver_name = "ecb-arc4-caam",
				.cra_blocksize = ARC4_BLOCK_SIZE,
			},
			.setkey = arc4_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = ARC4_MIN_KEY_SIZE,
			.max_keysize = ARC4_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
	},
};
1986
1987static struct caam_aead_alg driver_aeads[] = {
1988 {
1989 .aead = {
1990 .base = {
1991 .cra_name = "rfc4106(gcm(aes))",
1992 .cra_driver_name = "rfc4106-gcm-aes-caam",
1993 .cra_blocksize = 1,
1994 },
1995 .setkey = rfc4106_setkey,
1996 .setauthsize = rfc4106_setauthsize,
1997 .encrypt = ipsec_gcm_encrypt,
1998 .decrypt = ipsec_gcm_decrypt,
1999 .ivsize = GCM_RFC4106_IV_SIZE,
2000 .maxauthsize = AES_BLOCK_SIZE,
2001 },
2002 .caam = {
2003 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2004 .nodkp = true,
2005 },
2006 },
2007 {
2008 .aead = {
2009 .base = {
2010 .cra_name = "rfc4543(gcm(aes))",
2011 .cra_driver_name = "rfc4543-gcm-aes-caam",
2012 .cra_blocksize = 1,
2013 },
2014 .setkey = rfc4543_setkey,
2015 .setauthsize = rfc4543_setauthsize,
2016 .encrypt = ipsec_gcm_encrypt,
2017 .decrypt = ipsec_gcm_decrypt,
2018 .ivsize = GCM_RFC4543_IV_SIZE,
2019 .maxauthsize = AES_BLOCK_SIZE,
2020 },
2021 .caam = {
2022 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2023 .nodkp = true,
2024 },
2025 },
2026
2027 {
2028 .aead = {
2029 .base = {
2030 .cra_name = "gcm(aes)",
2031 .cra_driver_name = "gcm-aes-caam",
2032 .cra_blocksize = 1,
2033 },
2034 .setkey = gcm_setkey,
2035 .setauthsize = gcm_setauthsize,
2036 .encrypt = gcm_encrypt,
2037 .decrypt = gcm_decrypt,
2038 .ivsize = GCM_AES_IV_SIZE,
2039 .maxauthsize = AES_BLOCK_SIZE,
2040 },
2041 .caam = {
2042 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2043 .nodkp = true,
2044 },
2045 },
2046
2047 {
2048 .aead = {
2049 .base = {
2050 .cra_name = "authenc(hmac(md5),"
2051 "ecb(cipher_null))",
2052 .cra_driver_name = "authenc-hmac-md5-"
2053 "ecb-cipher_null-caam",
2054 .cra_blocksize = NULL_BLOCK_SIZE,
2055 },
2056 .setkey = aead_setkey,
2057 .setauthsize = aead_setauthsize,
2058 .encrypt = aead_encrypt,
2059 .decrypt = aead_decrypt,
2060 .ivsize = NULL_IV_SIZE,
2061 .maxauthsize = MD5_DIGEST_SIZE,
2062 },
2063 .caam = {
2064 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2065 OP_ALG_AAI_HMAC_PRECOMP,
2066 },
2067 },
2068 {
2069 .aead = {
2070 .base = {
2071 .cra_name = "authenc(hmac(sha1),"
2072 "ecb(cipher_null))",
2073 .cra_driver_name = "authenc-hmac-sha1-"
2074 "ecb-cipher_null-caam",
2075 .cra_blocksize = NULL_BLOCK_SIZE,
2076 },
2077 .setkey = aead_setkey,
2078 .setauthsize = aead_setauthsize,
2079 .encrypt = aead_encrypt,
2080 .decrypt = aead_decrypt,
2081 .ivsize = NULL_IV_SIZE,
2082 .maxauthsize = SHA1_DIGEST_SIZE,
2083 },
2084 .caam = {
2085 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2086 OP_ALG_AAI_HMAC_PRECOMP,
2087 },
2088 },
2089 {
2090 .aead = {
2091 .base = {
2092 .cra_name = "authenc(hmac(sha224),"
2093 "ecb(cipher_null))",
2094 .cra_driver_name = "authenc-hmac-sha224-"
2095 "ecb-cipher_null-caam",
2096 .cra_blocksize = NULL_BLOCK_SIZE,
2097 },
2098 .setkey = aead_setkey,
2099 .setauthsize = aead_setauthsize,
2100 .encrypt = aead_encrypt,
2101 .decrypt = aead_decrypt,
2102 .ivsize = NULL_IV_SIZE,
2103 .maxauthsize = SHA224_DIGEST_SIZE,
2104 },
2105 .caam = {
2106 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2107 OP_ALG_AAI_HMAC_PRECOMP,
2108 },
2109 },
2110 {
2111 .aead = {
2112 .base = {
2113 .cra_name = "authenc(hmac(sha256),"
2114 "ecb(cipher_null))",
2115 .cra_driver_name = "authenc-hmac-sha256-"
2116 "ecb-cipher_null-caam",
2117 .cra_blocksize = NULL_BLOCK_SIZE,
2118 },
2119 .setkey = aead_setkey,
2120 .setauthsize = aead_setauthsize,
2121 .encrypt = aead_encrypt,
2122 .decrypt = aead_decrypt,
2123 .ivsize = NULL_IV_SIZE,
2124 .maxauthsize = SHA256_DIGEST_SIZE,
2125 },
2126 .caam = {
2127 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2128 OP_ALG_AAI_HMAC_PRECOMP,
2129 },
2130 },
2131 {
2132 .aead = {
2133 .base = {
2134 .cra_name = "authenc(hmac(sha384),"
2135 "ecb(cipher_null))",
2136 .cra_driver_name = "authenc-hmac-sha384-"
2137 "ecb-cipher_null-caam",
2138 .cra_blocksize = NULL_BLOCK_SIZE,
2139 },
2140 .setkey = aead_setkey,
2141 .setauthsize = aead_setauthsize,
2142 .encrypt = aead_encrypt,
2143 .decrypt = aead_decrypt,
2144 .ivsize = NULL_IV_SIZE,
2145 .maxauthsize = SHA384_DIGEST_SIZE,
2146 },
2147 .caam = {
2148 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2149 OP_ALG_AAI_HMAC_PRECOMP,
2150 },
2151 },
2152 {
2153 .aead = {
2154 .base = {
2155 .cra_name = "authenc(hmac(sha512),"
2156 "ecb(cipher_null))",
2157 .cra_driver_name = "authenc-hmac-sha512-"
2158 "ecb-cipher_null-caam",
2159 .cra_blocksize = NULL_BLOCK_SIZE,
2160 },
2161 .setkey = aead_setkey,
2162 .setauthsize = aead_setauthsize,
2163 .encrypt = aead_encrypt,
2164 .decrypt = aead_decrypt,
2165 .ivsize = NULL_IV_SIZE,
2166 .maxauthsize = SHA512_DIGEST_SIZE,
2167 },
2168 .caam = {
2169 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2170 OP_ALG_AAI_HMAC_PRECOMP,
2171 },
2172 },
2173 {
2174 .aead = {
2175 .base = {
2176 .cra_name = "authenc(hmac(md5),cbc(aes))",
2177 .cra_driver_name = "authenc-hmac-md5-"
2178 "cbc-aes-caam",
2179 .cra_blocksize = AES_BLOCK_SIZE,
2180 },
2181 .setkey = aead_setkey,
2182 .setauthsize = aead_setauthsize,
2183 .encrypt = aead_encrypt,
2184 .decrypt = aead_decrypt,
2185 .ivsize = AES_BLOCK_SIZE,
2186 .maxauthsize = MD5_DIGEST_SIZE,
2187 },
2188 .caam = {
2189 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2190 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2191 OP_ALG_AAI_HMAC_PRECOMP,
2192 },
2193 },
2194 {
2195 .aead = {
2196 .base = {
2197 .cra_name = "echainiv(authenc(hmac(md5),"
2198 "cbc(aes)))",
2199 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2200 "cbc-aes-caam",
2201 .cra_blocksize = AES_BLOCK_SIZE,
2202 },
2203 .setkey = aead_setkey,
2204 .setauthsize = aead_setauthsize,
2205 .encrypt = aead_encrypt,
2206 .decrypt = aead_decrypt,
2207 .ivsize = AES_BLOCK_SIZE,
2208 .maxauthsize = MD5_DIGEST_SIZE,
2209 },
2210 .caam = {
2211 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2212 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2213 OP_ALG_AAI_HMAC_PRECOMP,
2214 .geniv = true,
2215 },
2216 },
2217 {
2218 .aead = {
2219 .base = {
2220 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2221 .cra_driver_name = "authenc-hmac-sha1-"
2222 "cbc-aes-caam",
2223 .cra_blocksize = AES_BLOCK_SIZE,
2224 },
2225 .setkey = aead_setkey,
2226 .setauthsize = aead_setauthsize,
2227 .encrypt = aead_encrypt,
2228 .decrypt = aead_decrypt,
2229 .ivsize = AES_BLOCK_SIZE,
2230 .maxauthsize = SHA1_DIGEST_SIZE,
2231 },
2232 .caam = {
2233 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2234 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2235 OP_ALG_AAI_HMAC_PRECOMP,
2236 },
2237 },
2238 {
2239 .aead = {
2240 .base = {
2241 .cra_name = "echainiv(authenc(hmac(sha1),"
2242 "cbc(aes)))",
2243 .cra_driver_name = "echainiv-authenc-"
2244 "hmac-sha1-cbc-aes-caam",
2245 .cra_blocksize = AES_BLOCK_SIZE,
2246 },
2247 .setkey = aead_setkey,
2248 .setauthsize = aead_setauthsize,
2249 .encrypt = aead_encrypt,
2250 .decrypt = aead_decrypt,
2251 .ivsize = AES_BLOCK_SIZE,
2252 .maxauthsize = SHA1_DIGEST_SIZE,
2253 },
2254 .caam = {
2255 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2256 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2257 OP_ALG_AAI_HMAC_PRECOMP,
2258 .geniv = true,
2259 },
2260 },
2261 {
2262 .aead = {
2263 .base = {
2264 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2265 .cra_driver_name = "authenc-hmac-sha224-"
2266 "cbc-aes-caam",
2267 .cra_blocksize = AES_BLOCK_SIZE,
2268 },
2269 .setkey = aead_setkey,
2270 .setauthsize = aead_setauthsize,
2271 .encrypt = aead_encrypt,
2272 .decrypt = aead_decrypt,
2273 .ivsize = AES_BLOCK_SIZE,
2274 .maxauthsize = SHA224_DIGEST_SIZE,
2275 },
2276 .caam = {
2277 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2278 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2279 OP_ALG_AAI_HMAC_PRECOMP,
2280 },
2281 },
2282 {
2283 .aead = {
2284 .base = {
2285 .cra_name = "echainiv(authenc(hmac(sha224),"
2286 "cbc(aes)))",
2287 .cra_driver_name = "echainiv-authenc-"
2288 "hmac-sha224-cbc-aes-caam",
2289 .cra_blocksize = AES_BLOCK_SIZE,
2290 },
2291 .setkey = aead_setkey,
2292 .setauthsize = aead_setauthsize,
2293 .encrypt = aead_encrypt,
2294 .decrypt = aead_decrypt,
2295 .ivsize = AES_BLOCK_SIZE,
2296 .maxauthsize = SHA224_DIGEST_SIZE,
2297 },
2298 .caam = {
2299 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2300 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2301 OP_ALG_AAI_HMAC_PRECOMP,
2302 .geniv = true,
2303 },
2304 },
2305 {
2306 .aead = {
2307 .base = {
2308 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2309 .cra_driver_name = "authenc-hmac-sha256-"
2310 "cbc-aes-caam",
2311 .cra_blocksize = AES_BLOCK_SIZE,
2312 },
2313 .setkey = aead_setkey,
2314 .setauthsize = aead_setauthsize,
2315 .encrypt = aead_encrypt,
2316 .decrypt = aead_decrypt,
2317 .ivsize = AES_BLOCK_SIZE,
2318 .maxauthsize = SHA256_DIGEST_SIZE,
2319 },
2320 .caam = {
2321 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2322 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2323 OP_ALG_AAI_HMAC_PRECOMP,
2324 },
2325 },
2326 {
2327 .aead = {
2328 .base = {
2329 .cra_name = "echainiv(authenc(hmac(sha256),"
2330 "cbc(aes)))",
2331 .cra_driver_name = "echainiv-authenc-"
2332 "hmac-sha256-cbc-aes-caam",
2333 .cra_blocksize = AES_BLOCK_SIZE,
2334 },
2335 .setkey = aead_setkey,
2336 .setauthsize = aead_setauthsize,
2337 .encrypt = aead_encrypt,
2338 .decrypt = aead_decrypt,
2339 .ivsize = AES_BLOCK_SIZE,
2340 .maxauthsize = SHA256_DIGEST_SIZE,
2341 },
2342 .caam = {
2343 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2344 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2345 OP_ALG_AAI_HMAC_PRECOMP,
2346 .geniv = true,
2347 },
2348 },
2349 {
2350 .aead = {
2351 .base = {
2352 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2353 .cra_driver_name = "authenc-hmac-sha384-"
2354 "cbc-aes-caam",
2355 .cra_blocksize = AES_BLOCK_SIZE,
2356 },
2357 .setkey = aead_setkey,
2358 .setauthsize = aead_setauthsize,
2359 .encrypt = aead_encrypt,
2360 .decrypt = aead_decrypt,
2361 .ivsize = AES_BLOCK_SIZE,
2362 .maxauthsize = SHA384_DIGEST_SIZE,
2363 },
2364 .caam = {
2365 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2366 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2367 OP_ALG_AAI_HMAC_PRECOMP,
2368 },
2369 },
2370 {
2371 .aead = {
2372 .base = {
2373 .cra_name = "echainiv(authenc(hmac(sha384),"
2374 "cbc(aes)))",
2375 .cra_driver_name = "echainiv-authenc-"
2376 "hmac-sha384-cbc-aes-caam",
2377 .cra_blocksize = AES_BLOCK_SIZE,
2378 },
2379 .setkey = aead_setkey,
2380 .setauthsize = aead_setauthsize,
2381 .encrypt = aead_encrypt,
2382 .decrypt = aead_decrypt,
2383 .ivsize = AES_BLOCK_SIZE,
2384 .maxauthsize = SHA384_DIGEST_SIZE,
2385 },
2386 .caam = {
2387 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2388 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2389 OP_ALG_AAI_HMAC_PRECOMP,
2390 .geniv = true,
2391 },
2392 },
2393 {
2394 .aead = {
2395 .base = {
2396 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2397 .cra_driver_name = "authenc-hmac-sha512-"
2398 "cbc-aes-caam",
2399 .cra_blocksize = AES_BLOCK_SIZE,
2400 },
2401 .setkey = aead_setkey,
2402 .setauthsize = aead_setauthsize,
2403 .encrypt = aead_encrypt,
2404 .decrypt = aead_decrypt,
2405 .ivsize = AES_BLOCK_SIZE,
2406 .maxauthsize = SHA512_DIGEST_SIZE,
2407 },
2408 .caam = {
2409 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2410 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2411 OP_ALG_AAI_HMAC_PRECOMP,
2412 },
2413 },
2414 {
2415 .aead = {
2416 .base = {
2417 .cra_name = "echainiv(authenc(hmac(sha512),"
2418 "cbc(aes)))",
2419 .cra_driver_name = "echainiv-authenc-"
2420 "hmac-sha512-cbc-aes-caam",
2421 .cra_blocksize = AES_BLOCK_SIZE,
2422 },
2423 .setkey = aead_setkey,
2424 .setauthsize = aead_setauthsize,
2425 .encrypt = aead_encrypt,
2426 .decrypt = aead_decrypt,
2427 .ivsize = AES_BLOCK_SIZE,
2428 .maxauthsize = SHA512_DIGEST_SIZE,
2429 },
2430 .caam = {
2431 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2432 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2433 OP_ALG_AAI_HMAC_PRECOMP,
2434 .geniv = true,
2435 },
2436 },
2437 {
2438 .aead = {
2439 .base = {
2440 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2441 .cra_driver_name = "authenc-hmac-md5-"
2442 "cbc-des3_ede-caam",
2443 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2444 },
2445 .setkey = des3_aead_setkey,
2446 .setauthsize = aead_setauthsize,
2447 .encrypt = aead_encrypt,
2448 .decrypt = aead_decrypt,
2449 .ivsize = DES3_EDE_BLOCK_SIZE,
2450 .maxauthsize = MD5_DIGEST_SIZE,
2451 },
2452 .caam = {
2453 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2454 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2455 OP_ALG_AAI_HMAC_PRECOMP,
2456 }
2457 },
2458 {
2459 .aead = {
2460 .base = {
2461 .cra_name = "echainiv(authenc(hmac(md5),"
2462 "cbc(des3_ede)))",
2463 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2464 "cbc-des3_ede-caam",
2465 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2466 },
2467 .setkey = des3_aead_setkey,
2468 .setauthsize = aead_setauthsize,
2469 .encrypt = aead_encrypt,
2470 .decrypt = aead_decrypt,
2471 .ivsize = DES3_EDE_BLOCK_SIZE,
2472 .maxauthsize = MD5_DIGEST_SIZE,
2473 },
2474 .caam = {
2475 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2476 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2477 OP_ALG_AAI_HMAC_PRECOMP,
2478 .geniv = true,
2479 }
2480 },
2481 {
2482 .aead = {
2483 .base = {
2484 .cra_name = "authenc(hmac(sha1),"
2485 "cbc(des3_ede))",
2486 .cra_driver_name = "authenc-hmac-sha1-"
2487 "cbc-des3_ede-caam",
2488 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2489 },
2490 .setkey = des3_aead_setkey,
2491 .setauthsize = aead_setauthsize,
2492 .encrypt = aead_encrypt,
2493 .decrypt = aead_decrypt,
2494 .ivsize = DES3_EDE_BLOCK_SIZE,
2495 .maxauthsize = SHA1_DIGEST_SIZE,
2496 },
2497 .caam = {
2498 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2499 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2500 OP_ALG_AAI_HMAC_PRECOMP,
2501 },
2502 },
2503 {
2504 .aead = {
2505 .base = {
2506 .cra_name = "echainiv(authenc(hmac(sha1),"
2507 "cbc(des3_ede)))",
2508 .cra_driver_name = "echainiv-authenc-"
2509 "hmac-sha1-"
2510 "cbc-des3_ede-caam",
2511 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2512 },
2513 .setkey = des3_aead_setkey,
2514 .setauthsize = aead_setauthsize,
2515 .encrypt = aead_encrypt,
2516 .decrypt = aead_decrypt,
2517 .ivsize = DES3_EDE_BLOCK_SIZE,
2518 .maxauthsize = SHA1_DIGEST_SIZE,
2519 },
2520 .caam = {
2521 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2522 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2523 OP_ALG_AAI_HMAC_PRECOMP,
2524 .geniv = true,
2525 },
2526 },
2527 {
2528 .aead = {
2529 .base = {
2530 .cra_name = "authenc(hmac(sha224),"
2531 "cbc(des3_ede))",
2532 .cra_driver_name = "authenc-hmac-sha224-"
2533 "cbc-des3_ede-caam",
2534 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2535 },
2536 .setkey = des3_aead_setkey,
2537 .setauthsize = aead_setauthsize,
2538 .encrypt = aead_encrypt,
2539 .decrypt = aead_decrypt,
2540 .ivsize = DES3_EDE_BLOCK_SIZE,
2541 .maxauthsize = SHA224_DIGEST_SIZE,
2542 },
2543 .caam = {
2544 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2545 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2546 OP_ALG_AAI_HMAC_PRECOMP,
2547 },
2548 },
2549 {
2550 .aead = {
2551 .base = {
2552 .cra_name = "echainiv(authenc(hmac(sha224),"
2553 "cbc(des3_ede)))",
2554 .cra_driver_name = "echainiv-authenc-"
2555 "hmac-sha224-"
2556 "cbc-des3_ede-caam",
2557 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2558 },
2559 .setkey = des3_aead_setkey,
2560 .setauthsize = aead_setauthsize,
2561 .encrypt = aead_encrypt,
2562 .decrypt = aead_decrypt,
2563 .ivsize = DES3_EDE_BLOCK_SIZE,
2564 .maxauthsize = SHA224_DIGEST_SIZE,
2565 },
2566 .caam = {
2567 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2568 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2569 OP_ALG_AAI_HMAC_PRECOMP,
2570 .geniv = true,
2571 },
2572 },
2573 {
2574 .aead = {
2575 .base = {
2576 .cra_name = "authenc(hmac(sha256),"
2577 "cbc(des3_ede))",
2578 .cra_driver_name = "authenc-hmac-sha256-"
2579 "cbc-des3_ede-caam",
2580 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2581 },
2582 .setkey = des3_aead_setkey,
2583 .setauthsize = aead_setauthsize,
2584 .encrypt = aead_encrypt,
2585 .decrypt = aead_decrypt,
2586 .ivsize = DES3_EDE_BLOCK_SIZE,
2587 .maxauthsize = SHA256_DIGEST_SIZE,
2588 },
2589 .caam = {
2590 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2591 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2592 OP_ALG_AAI_HMAC_PRECOMP,
2593 },
2594 },
2595 {
2596 .aead = {
2597 .base = {
2598 .cra_name = "echainiv(authenc(hmac(sha256),"
2599 "cbc(des3_ede)))",
2600 .cra_driver_name = "echainiv-authenc-"
2601 "hmac-sha256-"
2602 "cbc-des3_ede-caam",
2603 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2604 },
2605 .setkey = des3_aead_setkey,
2606 .setauthsize = aead_setauthsize,
2607 .encrypt = aead_encrypt,
2608 .decrypt = aead_decrypt,
2609 .ivsize = DES3_EDE_BLOCK_SIZE,
2610 .maxauthsize = SHA256_DIGEST_SIZE,
2611 },
2612 .caam = {
2613 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2614 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2615 OP_ALG_AAI_HMAC_PRECOMP,
2616 .geniv = true,
2617 },
2618 },
2619 {
2620 .aead = {
2621 .base = {
2622 .cra_name = "authenc(hmac(sha384),"
2623 "cbc(des3_ede))",
2624 .cra_driver_name = "authenc-hmac-sha384-"
2625 "cbc-des3_ede-caam",
2626 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2627 },
2628 .setkey = des3_aead_setkey,
2629 .setauthsize = aead_setauthsize,
2630 .encrypt = aead_encrypt,
2631 .decrypt = aead_decrypt,
2632 .ivsize = DES3_EDE_BLOCK_SIZE,
2633 .maxauthsize = SHA384_DIGEST_SIZE,
2634 },
2635 .caam = {
2636 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2637 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2638 OP_ALG_AAI_HMAC_PRECOMP,
2639 },
2640 },
2641 {
2642 .aead = {
2643 .base = {
2644 .cra_name = "echainiv(authenc(hmac(sha384),"
2645 "cbc(des3_ede)))",
2646 .cra_driver_name = "echainiv-authenc-"
2647 "hmac-sha384-"
2648 "cbc-des3_ede-caam",
2649 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2650 },
2651 .setkey = des3_aead_setkey,
2652 .setauthsize = aead_setauthsize,
2653 .encrypt = aead_encrypt,
2654 .decrypt = aead_decrypt,
2655 .ivsize = DES3_EDE_BLOCK_SIZE,
2656 .maxauthsize = SHA384_DIGEST_SIZE,
2657 },
2658 .caam = {
2659 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2660 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2661 OP_ALG_AAI_HMAC_PRECOMP,
2662 .geniv = true,
2663 },
2664 },
2665 {
2666 .aead = {
2667 .base = {
2668 .cra_name = "authenc(hmac(sha512),"
2669 "cbc(des3_ede))",
2670 .cra_driver_name = "authenc-hmac-sha512-"
2671 "cbc-des3_ede-caam",
2672 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2673 },
2674 .setkey = des3_aead_setkey,
2675 .setauthsize = aead_setauthsize,
2676 .encrypt = aead_encrypt,
2677 .decrypt = aead_decrypt,
2678 .ivsize = DES3_EDE_BLOCK_SIZE,
2679 .maxauthsize = SHA512_DIGEST_SIZE,
2680 },
2681 .caam = {
2682 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2683 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2684 OP_ALG_AAI_HMAC_PRECOMP,
2685 },
2686 },
2687 {
2688 .aead = {
2689 .base = {
2690 .cra_name = "echainiv(authenc(hmac(sha512),"
2691 "cbc(des3_ede)))",
2692 .cra_driver_name = "echainiv-authenc-"
2693 "hmac-sha512-"
2694 "cbc-des3_ede-caam",
2695 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2696 },
2697 .setkey = des3_aead_setkey,
2698 .setauthsize = aead_setauthsize,
2699 .encrypt = aead_encrypt,
2700 .decrypt = aead_decrypt,
2701 .ivsize = DES3_EDE_BLOCK_SIZE,
2702 .maxauthsize = SHA512_DIGEST_SIZE,
2703 },
2704 .caam = {
2705 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2706 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2707 OP_ALG_AAI_HMAC_PRECOMP,
2708 .geniv = true,
2709 },
2710 },
2711 {
2712 .aead = {
2713 .base = {
2714 .cra_name = "authenc(hmac(md5),cbc(des))",
2715 .cra_driver_name = "authenc-hmac-md5-"
2716 "cbc-des-caam",
2717 .cra_blocksize = DES_BLOCK_SIZE,
2718 },
2719 .setkey = aead_setkey,
2720 .setauthsize = aead_setauthsize,
2721 .encrypt = aead_encrypt,
2722 .decrypt = aead_decrypt,
2723 .ivsize = DES_BLOCK_SIZE,
2724 .maxauthsize = MD5_DIGEST_SIZE,
2725 },
2726 .caam = {
2727 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2728 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2729 OP_ALG_AAI_HMAC_PRECOMP,
2730 },
2731 },
2732 {
2733 .aead = {
2734 .base = {
2735 .cra_name = "echainiv(authenc(hmac(md5),"
2736 "cbc(des)))",
2737 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2738 "cbc-des-caam",
2739 .cra_blocksize = DES_BLOCK_SIZE,
2740 },
2741 .setkey = aead_setkey,
2742 .setauthsize = aead_setauthsize,
2743 .encrypt = aead_encrypt,
2744 .decrypt = aead_decrypt,
2745 .ivsize = DES_BLOCK_SIZE,
2746 .maxauthsize = MD5_DIGEST_SIZE,
2747 },
2748 .caam = {
2749 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2750 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2751 OP_ALG_AAI_HMAC_PRECOMP,
2752 .geniv = true,
2753 },
2754 },
2755 {
2756 .aead = {
2757 .base = {
2758 .cra_name = "authenc(hmac(sha1),cbc(des))",
2759 .cra_driver_name = "authenc-hmac-sha1-"
2760 "cbc-des-caam",
2761 .cra_blocksize = DES_BLOCK_SIZE,
2762 },
2763 .setkey = aead_setkey,
2764 .setauthsize = aead_setauthsize,
2765 .encrypt = aead_encrypt,
2766 .decrypt = aead_decrypt,
2767 .ivsize = DES_BLOCK_SIZE,
2768 .maxauthsize = SHA1_DIGEST_SIZE,
2769 },
2770 .caam = {
2771 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2772 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2773 OP_ALG_AAI_HMAC_PRECOMP,
2774 },
2775 },
2776 {
2777 .aead = {
2778 .base = {
2779 .cra_name = "echainiv(authenc(hmac(sha1),"
2780 "cbc(des)))",
2781 .cra_driver_name = "echainiv-authenc-"
2782 "hmac-sha1-cbc-des-caam",
2783 .cra_blocksize = DES_BLOCK_SIZE,
2784 },
2785 .setkey = aead_setkey,
2786 .setauthsize = aead_setauthsize,
2787 .encrypt = aead_encrypt,
2788 .decrypt = aead_decrypt,
2789 .ivsize = DES_BLOCK_SIZE,
2790 .maxauthsize = SHA1_DIGEST_SIZE,
2791 },
2792 .caam = {
2793 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2794 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2795 OP_ALG_AAI_HMAC_PRECOMP,
2796 .geniv = true,
2797 },
2798 },
2799 {
2800 .aead = {
2801 .base = {
2802 .cra_name = "authenc(hmac(sha224),cbc(des))",
2803 .cra_driver_name = "authenc-hmac-sha224-"
2804 "cbc-des-caam",
2805 .cra_blocksize = DES_BLOCK_SIZE,
2806 },
2807 .setkey = aead_setkey,
2808 .setauthsize = aead_setauthsize,
2809 .encrypt = aead_encrypt,
2810 .decrypt = aead_decrypt,
2811 .ivsize = DES_BLOCK_SIZE,
2812 .maxauthsize = SHA224_DIGEST_SIZE,
2813 },
2814 .caam = {
2815 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2816 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2817 OP_ALG_AAI_HMAC_PRECOMP,
2818 },
2819 },
2820 {
2821 .aead = {
2822 .base = {
2823 .cra_name = "echainiv(authenc(hmac(sha224),"
2824 "cbc(des)))",
2825 .cra_driver_name = "echainiv-authenc-"
2826 "hmac-sha224-cbc-des-caam",
2827 .cra_blocksize = DES_BLOCK_SIZE,
2828 },
2829 .setkey = aead_setkey,
2830 .setauthsize = aead_setauthsize,
2831 .encrypt = aead_encrypt,
2832 .decrypt = aead_decrypt,
2833 .ivsize = DES_BLOCK_SIZE,
2834 .maxauthsize = SHA224_DIGEST_SIZE,
2835 },
2836 .caam = {
2837 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2838 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2839 OP_ALG_AAI_HMAC_PRECOMP,
2840 .geniv = true,
2841 },
2842 },
2843 {
2844 .aead = {
2845 .base = {
2846 .cra_name = "authenc(hmac(sha256),cbc(des))",
2847 .cra_driver_name = "authenc-hmac-sha256-"
2848 "cbc-des-caam",
2849 .cra_blocksize = DES_BLOCK_SIZE,
2850 },
2851 .setkey = aead_setkey,
2852 .setauthsize = aead_setauthsize,
2853 .encrypt = aead_encrypt,
2854 .decrypt = aead_decrypt,
2855 .ivsize = DES_BLOCK_SIZE,
2856 .maxauthsize = SHA256_DIGEST_SIZE,
2857 },
2858 .caam = {
2859 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2860 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2861 OP_ALG_AAI_HMAC_PRECOMP,
2862 },
2863 },
2864 {
2865 .aead = {
2866 .base = {
2867 .cra_name = "echainiv(authenc(hmac(sha256),"
2868 "cbc(des)))",
2869 .cra_driver_name = "echainiv-authenc-"
2870 "hmac-sha256-cbc-des-caam",
2871 .cra_blocksize = DES_BLOCK_SIZE,
2872 },
2873 .setkey = aead_setkey,
2874 .setauthsize = aead_setauthsize,
2875 .encrypt = aead_encrypt,
2876 .decrypt = aead_decrypt,
2877 .ivsize = DES_BLOCK_SIZE,
2878 .maxauthsize = SHA256_DIGEST_SIZE,
2879 },
2880 .caam = {
2881 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2882 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2883 OP_ALG_AAI_HMAC_PRECOMP,
2884 .geniv = true,
2885 },
2886 },
2887 {
2888 .aead = {
2889 .base = {
2890 .cra_name = "authenc(hmac(sha384),cbc(des))",
2891 .cra_driver_name = "authenc-hmac-sha384-"
2892 "cbc-des-caam",
2893 .cra_blocksize = DES_BLOCK_SIZE,
2894 },
2895 .setkey = aead_setkey,
2896 .setauthsize = aead_setauthsize,
2897 .encrypt = aead_encrypt,
2898 .decrypt = aead_decrypt,
2899 .ivsize = DES_BLOCK_SIZE,
2900 .maxauthsize = SHA384_DIGEST_SIZE,
2901 },
2902 .caam = {
2903 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2904 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2905 OP_ALG_AAI_HMAC_PRECOMP,
2906 },
2907 },
2908 {
2909 .aead = {
2910 .base = {
2911 .cra_name = "echainiv(authenc(hmac(sha384),"
2912 "cbc(des)))",
2913 .cra_driver_name = "echainiv-authenc-"
2914 "hmac-sha384-cbc-des-caam",
2915 .cra_blocksize = DES_BLOCK_SIZE,
2916 },
2917 .setkey = aead_setkey,
2918 .setauthsize = aead_setauthsize,
2919 .encrypt = aead_encrypt,
2920 .decrypt = aead_decrypt,
2921 .ivsize = DES_BLOCK_SIZE,
2922 .maxauthsize = SHA384_DIGEST_SIZE,
2923 },
2924 .caam = {
2925 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2926 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2927 OP_ALG_AAI_HMAC_PRECOMP,
2928 .geniv = true,
2929 },
2930 },
2931 {
2932 .aead = {
2933 .base = {
2934 .cra_name = "authenc(hmac(sha512),cbc(des))",
2935 .cra_driver_name = "authenc-hmac-sha512-"
2936 "cbc-des-caam",
2937 .cra_blocksize = DES_BLOCK_SIZE,
2938 },
2939 .setkey = aead_setkey,
2940 .setauthsize = aead_setauthsize,
2941 .encrypt = aead_encrypt,
2942 .decrypt = aead_decrypt,
2943 .ivsize = DES_BLOCK_SIZE,
2944 .maxauthsize = SHA512_DIGEST_SIZE,
2945 },
2946 .caam = {
2947 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2948 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2949 OP_ALG_AAI_HMAC_PRECOMP,
2950 },
2951 },
2952 {
2953 .aead = {
2954 .base = {
2955 .cra_name = "echainiv(authenc(hmac(sha512),"
2956 "cbc(des)))",
2957 .cra_driver_name = "echainiv-authenc-"
2958 "hmac-sha512-cbc-des-caam",
2959 .cra_blocksize = DES_BLOCK_SIZE,
2960 },
2961 .setkey = aead_setkey,
2962 .setauthsize = aead_setauthsize,
2963 .encrypt = aead_encrypt,
2964 .decrypt = aead_decrypt,
2965 .ivsize = DES_BLOCK_SIZE,
2966 .maxauthsize = SHA512_DIGEST_SIZE,
2967 },
2968 .caam = {
2969 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2970 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2971 OP_ALG_AAI_HMAC_PRECOMP,
2972 .geniv = true,
2973 },
2974 },
2975 {
2976 .aead = {
2977 .base = {
2978 .cra_name = "authenc(hmac(md5),"
2979 "rfc3686(ctr(aes)))",
2980 .cra_driver_name = "authenc-hmac-md5-"
2981 "rfc3686-ctr-aes-caam",
2982 .cra_blocksize = 1,
2983 },
2984 .setkey = aead_setkey,
2985 .setauthsize = aead_setauthsize,
2986 .encrypt = aead_encrypt,
2987 .decrypt = aead_decrypt,
2988 .ivsize = CTR_RFC3686_IV_SIZE,
2989 .maxauthsize = MD5_DIGEST_SIZE,
2990 },
2991 .caam = {
2992 .class1_alg_type = OP_ALG_ALGSEL_AES |
2993 OP_ALG_AAI_CTR_MOD128,
2994 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2995 OP_ALG_AAI_HMAC_PRECOMP,
2996 .rfc3686 = true,
2997 },
2998 },
2999 {
3000 .aead = {
3001 .base = {
3002 .cra_name = "seqiv(authenc("
3003 "hmac(md5),rfc3686(ctr(aes))))",
3004 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3005 "rfc3686-ctr-aes-caam",
3006 .cra_blocksize = 1,
3007 },
3008 .setkey = aead_setkey,
3009 .setauthsize = aead_setauthsize,
3010 .encrypt = aead_encrypt,
3011 .decrypt = aead_decrypt,
3012 .ivsize = CTR_RFC3686_IV_SIZE,
3013 .maxauthsize = MD5_DIGEST_SIZE,
3014 },
3015 .caam = {
3016 .class1_alg_type = OP_ALG_ALGSEL_AES |
3017 OP_ALG_AAI_CTR_MOD128,
3018 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3019 OP_ALG_AAI_HMAC_PRECOMP,
3020 .rfc3686 = true,
3021 .geniv = true,
3022 },
3023 },
3024 {
3025 .aead = {
3026 .base = {
3027 .cra_name = "authenc(hmac(sha1),"
3028 "rfc3686(ctr(aes)))",
3029 .cra_driver_name = "authenc-hmac-sha1-"
3030 "rfc3686-ctr-aes-caam",
3031 .cra_blocksize = 1,
3032 },
3033 .setkey = aead_setkey,
3034 .setauthsize = aead_setauthsize,
3035 .encrypt = aead_encrypt,
3036 .decrypt = aead_decrypt,
3037 .ivsize = CTR_RFC3686_IV_SIZE,
3038 .maxauthsize = SHA1_DIGEST_SIZE,
3039 },
3040 .caam = {
3041 .class1_alg_type = OP_ALG_ALGSEL_AES |
3042 OP_ALG_AAI_CTR_MOD128,
3043 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3044 OP_ALG_AAI_HMAC_PRECOMP,
3045 .rfc3686 = true,
3046 },
3047 },
3048 {
3049 .aead = {
3050 .base = {
3051 .cra_name = "seqiv(authenc("
3052 "hmac(sha1),rfc3686(ctr(aes))))",
3053 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3054 "rfc3686-ctr-aes-caam",
3055 .cra_blocksize = 1,
3056 },
3057 .setkey = aead_setkey,
3058 .setauthsize = aead_setauthsize,
3059 .encrypt = aead_encrypt,
3060 .decrypt = aead_decrypt,
3061 .ivsize = CTR_RFC3686_IV_SIZE,
3062 .maxauthsize = SHA1_DIGEST_SIZE,
3063 },
3064 .caam = {
3065 .class1_alg_type = OP_ALG_ALGSEL_AES |
3066 OP_ALG_AAI_CTR_MOD128,
3067 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3068 OP_ALG_AAI_HMAC_PRECOMP,
3069 .rfc3686 = true,
3070 .geniv = true,
3071 },
3072 },
3073 {
3074 .aead = {
3075 .base = {
3076 .cra_name = "authenc(hmac(sha224),"
3077 "rfc3686(ctr(aes)))",
3078 .cra_driver_name = "authenc-hmac-sha224-"
3079 "rfc3686-ctr-aes-caam",
3080 .cra_blocksize = 1,
3081 },
3082 .setkey = aead_setkey,
3083 .setauthsize = aead_setauthsize,
3084 .encrypt = aead_encrypt,
3085 .decrypt = aead_decrypt,
3086 .ivsize = CTR_RFC3686_IV_SIZE,
3087 .maxauthsize = SHA224_DIGEST_SIZE,
3088 },
3089 .caam = {
3090 .class1_alg_type = OP_ALG_ALGSEL_AES |
3091 OP_ALG_AAI_CTR_MOD128,
3092 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3093 OP_ALG_AAI_HMAC_PRECOMP,
3094 .rfc3686 = true,
3095 },
3096 },
3097 {
3098 .aead = {
3099 .base = {
3100 .cra_name = "seqiv(authenc("
3101 "hmac(sha224),rfc3686(ctr(aes))))",
3102 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3103 "rfc3686-ctr-aes-caam",
3104 .cra_blocksize = 1,
3105 },
3106 .setkey = aead_setkey,
3107 .setauthsize = aead_setauthsize,
3108 .encrypt = aead_encrypt,
3109 .decrypt = aead_decrypt,
3110 .ivsize = CTR_RFC3686_IV_SIZE,
3111 .maxauthsize = SHA224_DIGEST_SIZE,
3112 },
3113 .caam = {
3114 .class1_alg_type = OP_ALG_ALGSEL_AES |
3115 OP_ALG_AAI_CTR_MOD128,
3116 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3117 OP_ALG_AAI_HMAC_PRECOMP,
3118 .rfc3686 = true,
3119 .geniv = true,
3120 },
3121 },
3122 {
3123 .aead = {
3124 .base = {
3125 .cra_name = "authenc(hmac(sha256),"
3126 "rfc3686(ctr(aes)))",
3127 .cra_driver_name = "authenc-hmac-sha256-"
3128 "rfc3686-ctr-aes-caam",
3129 .cra_blocksize = 1,
3130 },
3131 .setkey = aead_setkey,
3132 .setauthsize = aead_setauthsize,
3133 .encrypt = aead_encrypt,
3134 .decrypt = aead_decrypt,
3135 .ivsize = CTR_RFC3686_IV_SIZE,
3136 .maxauthsize = SHA256_DIGEST_SIZE,
3137 },
3138 .caam = {
3139 .class1_alg_type = OP_ALG_ALGSEL_AES |
3140 OP_ALG_AAI_CTR_MOD128,
3141 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3142 OP_ALG_AAI_HMAC_PRECOMP,
3143 .rfc3686 = true,
3144 },
3145 },
3146 {
3147 .aead = {
3148 .base = {
3149 .cra_name = "seqiv(authenc(hmac(sha256),"
3150 "rfc3686(ctr(aes))))",
3151 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3152 "rfc3686-ctr-aes-caam",
3153 .cra_blocksize = 1,
3154 },
3155 .setkey = aead_setkey,
3156 .setauthsize = aead_setauthsize,
3157 .encrypt = aead_encrypt,
3158 .decrypt = aead_decrypt,
3159 .ivsize = CTR_RFC3686_IV_SIZE,
3160 .maxauthsize = SHA256_DIGEST_SIZE,
3161 },
3162 .caam = {
3163 .class1_alg_type = OP_ALG_ALGSEL_AES |
3164 OP_ALG_AAI_CTR_MOD128,
3165 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3166 OP_ALG_AAI_HMAC_PRECOMP,
3167 .rfc3686 = true,
3168 .geniv = true,
3169 },
3170 },
3171 {
3172 .aead = {
3173 .base = {
3174 .cra_name = "authenc(hmac(sha384),"
3175 "rfc3686(ctr(aes)))",
3176 .cra_driver_name = "authenc-hmac-sha384-"
3177 "rfc3686-ctr-aes-caam",
3178 .cra_blocksize = 1,
3179 },
3180 .setkey = aead_setkey,
3181 .setauthsize = aead_setauthsize,
3182 .encrypt = aead_encrypt,
3183 .decrypt = aead_decrypt,
3184 .ivsize = CTR_RFC3686_IV_SIZE,
3185 .maxauthsize = SHA384_DIGEST_SIZE,
3186 },
3187 .caam = {
3188 .class1_alg_type = OP_ALG_ALGSEL_AES |
3189 OP_ALG_AAI_CTR_MOD128,
3190 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3191 OP_ALG_AAI_HMAC_PRECOMP,
3192 .rfc3686 = true,
3193 },
3194 },
3195 {
3196 .aead = {
3197 .base = {
3198 .cra_name = "seqiv(authenc(hmac(sha384),"
3199 "rfc3686(ctr(aes))))",
3200 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3201 "rfc3686-ctr-aes-caam",
3202 .cra_blocksize = 1,
3203 },
3204 .setkey = aead_setkey,
3205 .setauthsize = aead_setauthsize,
3206 .encrypt = aead_encrypt,
3207 .decrypt = aead_decrypt,
3208 .ivsize = CTR_RFC3686_IV_SIZE,
3209 .maxauthsize = SHA384_DIGEST_SIZE,
3210 },
3211 .caam = {
3212 .class1_alg_type = OP_ALG_ALGSEL_AES |
3213 OP_ALG_AAI_CTR_MOD128,
3214 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3215 OP_ALG_AAI_HMAC_PRECOMP,
3216 .rfc3686 = true,
3217 .geniv = true,
3218 },
3219 },
3220 {
3221 .aead = {
3222 .base = {
3223 .cra_name = "authenc(hmac(sha512),"
3224 "rfc3686(ctr(aes)))",
3225 .cra_driver_name = "authenc-hmac-sha512-"
3226 "rfc3686-ctr-aes-caam",
3227 .cra_blocksize = 1,
3228 },
3229 .setkey = aead_setkey,
3230 .setauthsize = aead_setauthsize,
3231 .encrypt = aead_encrypt,
3232 .decrypt = aead_decrypt,
3233 .ivsize = CTR_RFC3686_IV_SIZE,
3234 .maxauthsize = SHA512_DIGEST_SIZE,
3235 },
3236 .caam = {
3237 .class1_alg_type = OP_ALG_ALGSEL_AES |
3238 OP_ALG_AAI_CTR_MOD128,
3239 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3240 OP_ALG_AAI_HMAC_PRECOMP,
3241 .rfc3686 = true,
3242 },
3243 },
3244 {
3245 .aead = {
3246 .base = {
3247 .cra_name = "seqiv(authenc(hmac(sha512),"
3248 "rfc3686(ctr(aes))))",
3249 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3250 "rfc3686-ctr-aes-caam",
3251 .cra_blocksize = 1,
3252 },
3253 .setkey = aead_setkey,
3254 .setauthsize = aead_setauthsize,
3255 .encrypt = aead_encrypt,
3256 .decrypt = aead_decrypt,
3257 .ivsize = CTR_RFC3686_IV_SIZE,
3258 .maxauthsize = SHA512_DIGEST_SIZE,
3259 },
3260 .caam = {
3261 .class1_alg_type = OP_ALG_ALGSEL_AES |
3262 OP_ALG_AAI_CTR_MOD128,
3263 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3264 OP_ALG_AAI_HMAC_PRECOMP,
3265 .rfc3686 = true,
3266 .geniv = true,
3267 },
3268 },
3269 {
3270 .aead = {
3271 .base = {
3272 .cra_name = "rfc7539(chacha20,poly1305)",
3273 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3274 "caam",
3275 .cra_blocksize = 1,
3276 },
3277 .setkey = chachapoly_setkey,
3278 .setauthsize = chachapoly_setauthsize,
3279 .encrypt = chachapoly_encrypt,
3280 .decrypt = chachapoly_decrypt,
3281 .ivsize = CHACHAPOLY_IV_SIZE,
3282 .maxauthsize = POLY1305_DIGEST_SIZE,
3283 },
3284 .caam = {
3285 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3286 OP_ALG_AAI_AEAD,
3287 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3288 OP_ALG_AAI_AEAD,
3289 .nodkp = true,
3290 },
3291 },
3292 {
3293 .aead = {
3294 .base = {
3295 .cra_name = "rfc7539esp(chacha20,poly1305)",
3296 .cra_driver_name = "rfc7539esp-chacha20-"
3297 "poly1305-caam",
3298 .cra_blocksize = 1,
3299 },
3300 .setkey = chachapoly_setkey,
3301 .setauthsize = chachapoly_setauthsize,
3302 .encrypt = chachapoly_encrypt,
3303 .decrypt = chachapoly_decrypt,
3304 .ivsize = 8,
3305 .maxauthsize = POLY1305_DIGEST_SIZE,
3306 },
3307 .caam = {
3308 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3309 OP_ALG_AAI_AEAD,
3310 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3311 OP_ALG_AAI_AEAD,
3312 .nodkp = true,
3313 },
3314 },
3315};
3316
/*
 * caam_init_common - per-transform setup shared by skcipher and AEAD tfms
 * @ctx:      per-tfm context embedded in the crypto tfm (zeroed by the API)
 * @caam:     template entry supplying the CAAM class 1 / class 2 alg types
 * @uses_dkp: true if the shared descriptors use the derived-key protocol;
 *            on Era 6+ parts this makes the context mapping bidirectional
 *            (presumably because the device writes back into the context
 *            buffer — confirm against descriptor construction code)
 *
 * Allocates a job ring for this transform and DMA-maps, in a single mapping,
 * the leading region of struct caam_ctx that holds both shared descriptors
 * and the key buffer (sh_desc_enc .. key, i.e. everything up to the
 * sh_desc_enc_dma member — see the struct layout). The per-member DMA
 * addresses are then derived as offsets from that one mapping.
 *
 * Returns 0 on success, -ENOMEM on mapping failure, or the PTR_ERR of a
 * failed job ring allocation.
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;
	/* Start of the DMA-mapped window inside struct caam_ctx */
	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
						   sh_desc_enc);

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	/* DKP on Era 6+ requires the device to access the context both ways */
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * Map sh_desc_enc, sh_desc_dec and key at once; length is the span
	 * from sh_desc_enc up to (but excluding) sh_desc_enc_dma.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma) -
					sh_desc_enc_offset,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* Derive each member's bus address from the single mapping */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec) -
			       sh_desc_enc_offset;
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
		       sh_desc_enc_offset;

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
3361
3362static int caam_cra_init(struct crypto_skcipher *tfm)
3363{
3364 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3365 struct caam_skcipher_alg *caam_alg =
3366 container_of(alg, typeof(*caam_alg), skcipher);
3367 struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
3368
3369 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3370
3371 ctx->enginectx.op.do_one_request = skcipher_do_one_req;
3372
3373 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3374 false);
3375}
3376
3377static int caam_aead_init(struct crypto_aead *tfm)
3378{
3379 struct aead_alg *alg = crypto_aead_alg(tfm);
3380 struct caam_aead_alg *caam_alg =
3381 container_of(alg, struct caam_aead_alg, aead);
3382 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3383
3384 crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3385
3386 ctx->enginectx.op.do_one_request = aead_do_one_req;
3387
3388 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3389}
3390
3391static void caam_exit_common(struct caam_ctx *ctx)
3392{
3393 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3394 offsetof(struct caam_ctx, sh_desc_enc_dma) -
3395 offsetof(struct caam_ctx, sh_desc_enc),
3396 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3397 caam_jr_free(ctx->jrdev);
3398}
3399
/* skcipher tfm exit callback: tear down the common CAAM context. */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	caam_exit_common(ctx);
}
3404
/* AEAD tfm exit callback: tear down the common CAAM context. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
3409
3410void caam_algapi_exit(void)
3411{
3412 int i;
3413
3414 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3415 struct caam_aead_alg *t_alg = driver_aeads + i;
3416
3417 if (t_alg->registered)
3418 crypto_unregister_aead(&t_alg->aead);
3419 }
3420
3421 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3422 struct caam_skcipher_alg *t_alg = driver_algs + i;
3423
3424 if (t_alg->registered)
3425 crypto_unregister_skcipher(&t_alg->skcipher);
3426 }
3427}
3428
3429static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3430{
3431 struct skcipher_alg *alg = &t_alg->skcipher;
3432
3433 alg->base.cra_module = THIS_MODULE;
3434 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3435 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3436 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3437
3438 alg->init = caam_cra_init;
3439 alg->exit = caam_cra_exit;
3440}
3441
3442static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3443{
3444 struct aead_alg *alg = &t_alg->aead;
3445
3446 alg->base.cra_module = THIS_MODULE;
3447 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3448 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3449 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3450
3451 alg->init = caam_aead_init;
3452 alg->exit = caam_aead_exit;
3453}
3454
/*
 * caam_algapi_init - probe hardware capabilities and register algorithms
 * @ctrldev: CAAM controller device; its drvdata holds the driver-private
 *	     state (era, register map)
 *
 * Reads the controller's capability registers (layout differs before and
 * after Era 10) to determine which accelerators (DES, AES, MD, ChaCha,
 * Poly1305, ARC4) are instantiated and their versions, then registers only
 * the skcipher and AEAD algorithms the hardware can run. A registration
 * failure is logged and skipped, not fatal.
 *
 * Returns the result of the last crypto_register_* call attempted
 * (0 when none failed or none were attempted).
 */
int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	u32 arc4_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Detect presence and attributes of the DES, AES and MD blocks.
	 * Era < 10 packs this into the shared CHA id/num registers;
	 * Era >= 10 exposes per-CHA version registers instead.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
			    CHA_ID_LS_ARC4_SHIFT;
		/* no CCHA/PTHA capability registers here: treat as absent */
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		/* low-power AES with revision < 8 has no GCM support */
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
		arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* LP256 MD blocks only handle digests up to SHA-256 size */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices (XTS is not).
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
3608