#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/sha.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;
	int xcm; /* 0 = authenc, else EIP197_XCM_MODE_GCM/CCM */

	__le32 key[16];
	u32 nonce;
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];

	struct crypto_cipher *hkaes;
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;
	int nr_src, nr_dst;
};

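/*
 * Set up the IV/nonce part of the command token. Depending on the cipher
 * mode, the first four 32-bit words of cdesc->control_data.token carry the
 * RFC3686 nonce + IV + counter, the 96-bit GCM IV + counter, or the
 * variable-length CCM IV; plain block modes just load the block-sized IV.
 */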
static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				  struct safexcel_command_desc *cdesc)
{
	u32 block_sz = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 1 (big endian!) */
		cdesc->control_data.token[3] = cpu_to_be32(1);

		return;
	} else if (ctx->xcm == EIP197_XCM_MODE_GCM) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 1 (big endian!) */
		cdesc->control_data.token[3] = cpu_to_be32(1);

		return;
	} else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

		/* Variable length IV part */
		memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
		/* Start variable length counter at 0 */
		memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
		       0, iv[0] + 1);

		return;
	}

	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}
		memcpy(cdesc->control_data.token, iv, block_sz);
	}
}

static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	/* skip over worst case IV of 4 dwords, no need to be exact */
	token = (struct safexcel_token *)(cdesc->control_data.token + 4);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYPTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

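/*
 * Build the instruction sequence for an AEAD operation. The token slots
 * below are fixed by position: 0-5 hold the CCM B0 block and AAD length,
 * 6-7 process and pad the associated data, 8-9 move the GCM/CCM
 * intermediate result, 10-11 process and pad the payload, and 12-13
 * insert (encrypt) or retrieve and verify (decrypt) the authentication
 * tag. The sequence is aligned to the end of the token buffer.
 */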
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	if (direction == SAFEXCEL_ENCRYPT) {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 13);

		token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[12].packet_length = digestsize;
		token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[12].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		cryptlen -= digestsize;

		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 14);

		token[12].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[12].packet_length = digestsize;
		token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[12].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[13].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[13].packet_length = digestsize |
					  EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
				 EIP197_TOKEN_STAT_LAST_PACKET;
		token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[6].packet_length = assoclen;

	if (likely(cryptlen)) {
		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;

		token[10].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[10].packet_length = cryptlen;
		token[10].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[10].instructions = EIP197_TOKEN_INS_LAST |
					 EIP197_TOKEN_INS_TYPE_CRYPTO |
					 EIP197_TOKEN_INS_TYPE_HASH |
					 EIP197_TOKEN_INS_TYPE_OUTPUT;
	} else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
		token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[6].instructions = EIP197_TOKEN_INS_LAST |
					EIP197_TOKEN_INS_TYPE_HASH;
	}

	if (!ctx->xcm)
		return;

	token[8].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
	token[8].packet_length = 0;
	token[8].instructions = AES_BLOCK_SIZE;

	token[9].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[9].packet_length = AES_BLOCK_SIZE;
	token[9].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_TYPE_CRYPTO;

	if (ctx->xcm == EIP197_XCM_MODE_GCM) {
		token[6].instructions = EIP197_TOKEN_INS_LAST |
					EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		u8 *cbcmaciv = (u8 *)&token[1];
		u32 *aadlen = (u32 *)&token[5];

		/* Construct IV block B0 for the CBC-MAC */
		token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[0].packet_length = AES_BLOCK_SIZE +
					 ((assoclen > 0) << 1);
		token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
					EIP197_TOKEN_INS_TYPE_HASH;
		/* Variable length IV part */
		memcpy(cbcmaciv, iv, 15 - iv[0]);
		/* fixup flags byte */
		cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
		/* Clear upper bytes of variable message length to 0 */
		memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
		/* insert lower 2 bytes of message length */
		cbcmaciv[14] = cryptlen >> 8;
		cbcmaciv[15] = cryptlen & 255;

		if (assoclen) {
			*aadlen = cpu_to_le32(cpu_to_be16(assoclen));
			assoclen += 2;
		}

		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;

		/* Align AAD data towards hash engine */
		token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
		assoclen &= 15;
		token[7].packet_length = assoclen ? 16 - assoclen : 0;

		if (likely(cryptlen)) {
			token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Align crypto data towards hash engine */
			token[10].stat = 0;

			token[11].opcode = EIP197_TOKEN_OPCODE_INSERT;
			cryptlen &= 15;
			token[11].packet_length = cryptlen ? 16 - cryptlen : 0;
			token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
			token[11].instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
			token[7].instructions = EIP197_TOKEN_INS_LAST |
						EIP197_TOKEN_INS_TYPE_HASH;
		}
	}
}

static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (keys.enckeylen < (AES_MIN_KEY_SIZE +
				      CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
			      CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
badkey_expflags:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

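/*
 * Fill in the per-request context control words: record size, key enable,
 * operation type (crypto only, or combined hash+crypt in the order the
 * mode requires) and the cipher/hash algorithm identifiers.
 */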
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else {
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				(ctx->xcm == EIP197_XCM_MODE_CCM) ?
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT :
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |=
				(ctx->xcm == EIP197_XCM_MODE_CCM) ?
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN :
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	}

	return 0;
}

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

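/*
 * Map the source/destination scatterlists, emit one command descriptor per
 * source segment (the first one also carrying the context record and the
 * token), and one result descriptor per destination segment, rolling
 * everything back if the rings run out of space.
 */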
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), ctx->opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	/* The EIP cannot deal with zero length input packets! */
	if (totlen == 0)
		totlen = 1;

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1)
			first_cdesc = cdesc;

		queued -= len;
		if (!queued)
			break;
	}

	if (unlikely(!n_cdesc)) {
		/*
		 * Special case: zero length input buffer.
		 * The engine always needs the 1st command descriptor, however!
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
						 ctx->base.ctxr_dma);
		n_cdesc = 1;
	}

	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}

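/*
 * Synchronous context invalidation: when a cached context record must be
 * dropped (key change or transform teardown), a dedicated invalidation
 * request is queued to the engine and the caller sleeps until its
 * completion callback fires.
 */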
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

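/*
 * Common request entry point: allocate the DMA context record on first
 * use (or flag it for invalidation if the key changed), then enqueue the
 * request on the context's ring and kick the ring worker.
 */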
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, it needs to be invalidated */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, it needs to be invalidated */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg = SAFEXCEL_AES; /* default */
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

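	/* Check for illegal XTS keys */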
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

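	/* Only the first half of the key data is the cipher key */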
	keylen = len >> 1;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		/* Don't leave expanded key material on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

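	/* If the engine caches this context, a key change must invalidate it */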
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

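	/* The second half of the key data is the tweak key */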
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		/* Don't leave expanded key material on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i + keylen / sizeof(u32)] !=
			    cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

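/* XTS needs at least one full AES block of input; shorter requests are rejected */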
static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
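		/* XTS actually uses two AES keys glued together */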
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

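	/* Compute the GHASH subkey H by encrypting a block of zeroes */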
	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
			      CRYPTO_TFM_RES_MASK);
	if (ret) {
		/* Don't leave expanded key material on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	memset(hashkey, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM;

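	/* Software AES instance, only used to derive the GHASH subkey at setkey time */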
	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(ctx->hkaes))
		return PTR_ERR(ctx->hkaes);

	return 0;
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->hkaes);
	safexcel_aead_cra_exit(tfm);
}

static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
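	/* GCM allows ICV lengths of 4, 8 and 12..16 bytes; the helper enforces this */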
	return crypto_gcm_check_authsize(authsize);
}

struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

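	/*
	 * CCM uses the AES key both as the cipher key and, byte-swapped, as
	 * the CBC-MAC key stored after the two XCBC subkey blocks in the
	 * hash context.
	 */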
	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM;
	return 0;
}

static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
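	/* CCM only allows even ICV lengths from 4 to 16 bytes (as in crypto/ccm.c) */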
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

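	/* The first IV byte carries L' = L - 1; CCM only defines 2..8 byte length fields */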
	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

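	/* Same L' sanity check as for encryption */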
	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
