#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "t4fw_api.h"
#include "t4_msg.h"
#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"

#define IV AES_BLOCK_SIZE

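/*
 * Space (in bytes) that a source ULPTX SGL (sgl_ent_len) or a
 * destination PHYS_DSGL (dsgl_ent_len) occupies in a work request,
 * looked up by entry count.  Used to budget how much data fits into
 * one work request.
 */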
static unsigned int sgl_ent_len[] = {
	0, 0, 16, 24, 40, 48, 64, 72, 88,
	96, 112, 120, 136, 144, 160, 168, 184,
	192, 208, 216, 232, 240, 256, 264, 280,
	288, 304, 312, 328, 336, 352, 360, 376
};

static unsigned int dsgl_ent_len[] = {
	0, 32, 32, 48, 48, 64, 64, 80, 80,
	112, 112, 128, 128, 144, 144, 160, 160,
	192, 192, 208, 208, 224, 224, 240, 240,
	272, 272, 288, 288, 304, 304, 320, 320
};

static u32 round_constant[11] = {
	0x01000000, 0x02000000, 0x04000000, 0x08000000,
	0x10000000, 0x20000000, 0x40000000, 0x80000000,
	0x1B000000, 0x36000000, 0x6C000000
};

static int chcr_handle_cipher_resp(struct ablkcipher_request *req,
				   unsigned char *input, int err);

static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
	return ctx->dev->u_ctx;
}

static inline int is_ofld_imm(const struct sk_buff *skb)
{
	return (skb->len <= SGE_MAX_WR_LEN);
}

static inline void chcr_init_hctx_per_wr(struct chcr_ahash_req_ctx *reqctx)
{
	memset(&reqctx->hctx_wr, 0, sizeof(struct chcr_hctx_per_wr));
}

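/*
 * Return the number of SGL entries needed to cover @reqlen bytes of
 * @sg, skipping the first @skip bytes, with each entry limited to
 * @entlen bytes.
 */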
static int sg_nents_xlen(struct scatterlist *sg, unsigned int reqlen,
			 unsigned int entlen,
			 unsigned int skip)
{
	int nents = 0;
	unsigned int less;
	unsigned int skip_len = 0;

	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}

	while (sg && reqlen) {
		less = min(reqlen, sg_dma_len(sg) - skip_len);
		nents += DIV_ROUND_UP(less, entlen);
		reqlen -= less;
		skip_len = 0;
		sg = sg_next(sg);
	}
	return nents;
}

static inline int get_aead_subtype(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.aead);
	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

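/*
 * Compare the authentication tag in the CPL_FW6_PLD completion against
 * the expected tag (taken from the message payload for GCM modes, or
 * from the tail of the source scatterlist otherwise) and set *err to
 * -EBADMSG on mismatch.
 */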
void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
	u8 temp[SHA512_DIGEST_SIZE];
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	struct cpl_fw6_pld *fw6_pld;
	int cmp = 0;

	fw6_pld = (struct cpl_fw6_pld *)input;
	if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
	    (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
		cmp = crypto_memneq(&fw6_pld->data[2], (fw6_pld + 1), authsize);
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
				   authsize, req->assoclen +
				   req->cryptlen - authsize);
		cmp = crypto_memneq(temp, (fw6_pld + 1), authsize);
	}
	if (cmp)
		*err = -EBADMSG;
	else
		*err = 0;
}

static inline void chcr_handle_aead_resp(struct aead_request *req,
					 unsigned char *input,
					 int err)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	chcr_aead_common_exit(req);
	if (reqctx->verify == VERIFY_SW) {
		chcr_verify_tag(req, input, &err);
		reqctx->verify = VERIFY_HW;
	}
	req->base.complete(&req->base, err);
}

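/*
 * Run the AES key schedule on @key and emit the round keys in reverse
 * order into @dec_key, the form the hardware expects for decryption.
 */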
static void get_aes_decrypt_key(unsigned char *dec_key,
				const unsigned char *key,
				unsigned int keylength)
{
	u32 temp;
	u32 w_ring[MAX_NK];
	int i, j, k;
	u8 nr, nk;

	switch (keylength) {
	case AES_KEYLENGTH_128BIT:
		nk = KEYLENGTH_4BYTES;
		nr = NUMBER_OF_ROUNDS_10;
		break;
	case AES_KEYLENGTH_192BIT:
		nk = KEYLENGTH_6BYTES;
		nr = NUMBER_OF_ROUNDS_12;
		break;
	case AES_KEYLENGTH_256BIT:
		nk = KEYLENGTH_8BYTES;
		nr = NUMBER_OF_ROUNDS_14;
		break;
	default:
		return;
	}
	for (i = 0; i < nk; i++)
		w_ring[i] = be32_to_cpu(*(u32 *)&key[4 * i]);

	i = 0;
	temp = w_ring[nk - 1];
	while (i + nk < (nr + 1) * 4) {
		if (!(i % nk)) {
			temp = (temp << 8) | (temp >> 24);
			temp = aes_ks_subword(temp);
			temp ^= round_constant[i / nk];
		} else if (nk == 8 && (i % 4 == 0)) {
			temp = aes_ks_subword(temp);
		}
		w_ring[i % nk] ^= temp;
		temp = w_ring[i % nk];
		i++;
	}
	i--;
	for (k = 0, j = i % nk; k < nk; k++) {
		*((u32 *)dec_key + k) = htonl(w_ring[j]);
		j--;
		if (j < 0)
			j += nk;
	}
}

static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);

	switch (ds) {
	case SHA1_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha1", 0, 0);
		break;
	case SHA224_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha224", 0, 0);
		break;
	case SHA256_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha256", 0, 0);
		break;
	case SHA384_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha384", 0, 0);
		break;
	case SHA512_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha512", 0, 0);
		break;
	}

	return base_hash;
}

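/*
 * Hash one block of HMAC ipad/opad material with the software shash
 * and export the intermediate state into @result_hash; the partial
 * state is later programmed into the hardware key context.
 */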
static int chcr_compute_partial_hash(struct shash_desc *desc,
				     char *iopad, char *result_hash,
				     int digest_size)
{
	struct sha1_state sha1_st;
	struct sha256_state sha256_st;
	struct sha512_state sha512_st;
	int error;

	if (digest_size == SHA1_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha1_st);
		memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
	} else if (digest_size == SHA224_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
	} else if (digest_size == SHA256_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
	} else if (digest_size == SHA384_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else if (digest_size == SHA512_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else {
		error = -EINVAL;
		pr_err("Unknown digest size %d\n", digest_size);
	}
	return error;
}

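/*
 * Convert the exported hash state words to the big-endian order used
 * by the hardware key context.
 */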
static void chcr_change_order(char *buf, int ds)
{
	int i;

	if (ds == SHA512_DIGEST_SIZE) {
		for (i = 0; i < (ds / sizeof(u64)); i++)
			*((__be64 *)buf + i) =
				cpu_to_be64(*((u64 *)buf + i));
	} else {
		for (i = 0; i < (ds / sizeof(u32)); i++)
			*((__be32 *)buf + i) =
				cpu_to_be32(*((u32 *)buf + i));
	}
}

static inline int is_hmac(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
			     alg.hash);
	if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
		return 1;
	return 0;
}

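/*
 * dsgl_walk_*: build the CPL_RX_PHYS_DSGL destination gather list that
 * response data is DMAed into; entries are packed eight per
 * phys_sge_pairs block.
 */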
static inline void dsgl_walk_init(struct dsgl_walk *walk,
				  struct cpl_rx_phys_dsgl *dsgl)
{
	walk->dsgl = dsgl;
	walk->nents = 0;
	walk->to = (struct phys_sge_pairs *)(dsgl + 1);
}

static inline void dsgl_walk_end(struct dsgl_walk *walk, unsigned short qid,
				 int pci_chan_id)
{
	struct cpl_rx_phys_dsgl *phys_cpl;

	phys_cpl = walk->dsgl;

	phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
				    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
	phys_cpl->pcirlxorder_to_noofsgentr =
		htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
		      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
		      CPL_RX_PHYS_DSGL_DCAID_V(0) |
		      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents));
	phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
	phys_cpl->rss_hdr_int.qid = htons(qid);
	phys_cpl->rss_hdr_int.hash_val = 0;
	phys_cpl->rss_hdr_int.channel = pci_chan_id;
}

static inline void dsgl_walk_add_page(struct dsgl_walk *walk,
				      size_t size,
				      dma_addr_t *addr)
{
	int j;

	if (!size)
		return;
	j = walk->nents;
	walk->to->len[j % 8] = htons(size);
	walk->to->addr[j % 8] = cpu_to_be64(*addr);
	j++;
	if ((j % 8) == 0)
		walk->to++;
	walk->nents = j;
}

static void dsgl_walk_add_sg(struct dsgl_walk *walk,
			     struct scatterlist *sg,
			     unsigned int slen,
			     unsigned int skip)
{
	int skip_len = 0;
	unsigned int left_size = slen, len = 0;
	unsigned int j = walk->nents;
	int offset, ent_len;

	if (!slen)
		return;
	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}

	while (left_size && sg) {
		len = min_t(u32, left_size, sg_dma_len(sg) - skip_len);
		offset = 0;
		while (len) {
			ent_len = min_t(u32, len, CHCR_DST_SG_SIZE);
			walk->to->len[j % 8] = htons(ent_len);
			walk->to->addr[j % 8] = cpu_to_be64(sg_dma_address(sg) +
							    offset + skip_len);
			offset += ent_len;
			len -= ent_len;
			j++;
			if ((j % 8) == 0)
				walk->to++;
		}
		walk->last_sg = sg;
		walk->last_sg_len = min_t(u32, left_size, sg_dma_len(sg) -
					  skip_len) + skip_len;
		left_size -= min_t(u32, left_size, sg_dma_len(sg) - skip_len);
		skip_len = 0;
		sg = sg_next(sg);
	}
	walk->nents = j;
}

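/*
 * ulptx_walk_*: build the ULPTX source SGL of a work request.  The
 * first entry lives in the SGL header (len0/addr0); the rest are
 * stored as address/length pairs.
 */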
static inline void ulptx_walk_init(struct ulptx_walk *walk,
				   struct ulptx_sgl *ulp)
{
	walk->sgl = ulp;
	walk->nents = 0;
	walk->pair_idx = 0;
	walk->pair = ulp->sge;
	walk->last_sg = NULL;
	walk->last_sg_len = 0;
}

static inline void ulptx_walk_end(struct ulptx_walk *walk)
{
	walk->sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) |
				    ULPTX_NSGE_V(walk->nents));
}

static inline void ulptx_walk_add_page(struct ulptx_walk *walk,
				       size_t size,
				       dma_addr_t *addr)
{
	if (!size)
		return;

	if (walk->nents == 0) {
		walk->sgl->len0 = cpu_to_be32(size);
		walk->sgl->addr0 = cpu_to_be64(*addr);
	} else {
		walk->pair->addr[walk->pair_idx] = cpu_to_be64(*addr);
		walk->pair->len[walk->pair_idx] = cpu_to_be32(size);
		walk->pair_idx = !walk->pair_idx;
		if (!walk->pair_idx)
			walk->pair++;
	}
	walk->nents++;
}

static void ulptx_walk_add_sg(struct ulptx_walk *walk,
			      struct scatterlist *sg,
			      unsigned int len,
			      unsigned int skip)
{
	int small;
	int skip_len = 0;
	unsigned int sgmin;

	if (!len)
		return;
	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}
	WARN(!sg, "SG should not be null here\n");
	if (sg && (walk->nents == 0)) {
		small = min_t(unsigned int, sg_dma_len(sg) - skip_len, len);
		sgmin = min_t(unsigned int, small, CHCR_SRC_SG_SIZE);
		walk->sgl->len0 = cpu_to_be32(sgmin);
		walk->sgl->addr0 = cpu_to_be64(sg_dma_address(sg) + skip_len);
		walk->nents++;
		len -= sgmin;
		walk->last_sg = sg;
		walk->last_sg_len = sgmin + skip_len;
		skip_len += sgmin;
		if (sg_dma_len(sg) == skip_len) {
			sg = sg_next(sg);
			skip_len = 0;
		}
	}

	while (sg && len) {
		small = min(sg_dma_len(sg) - skip_len, len);
		sgmin = min_t(unsigned int, small, CHCR_SRC_SG_SIZE);
		walk->pair->len[walk->pair_idx] = cpu_to_be32(sgmin);
		walk->pair->addr[walk->pair_idx] =
			cpu_to_be64(sg_dma_address(sg) + skip_len);
		walk->pair_idx = !walk->pair_idx;
		walk->nents++;
		if (!walk->pair_idx)
			walk->pair++;
		len -= sgmin;
		skip_len += sgmin;
		walk->last_sg = sg;
		walk->last_sg_len = skip_len;
		if (sg_dma_len(sg) == skip_len) {
			sg = sg_next(sg);
			skip_len = 0;
		}
	}
}

static inline int get_cryptoalg_subtype(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.crypto);

	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
	struct adapter *adap = netdev2adap(dev);
	struct sge_uld_txq_info *txq_info =
		adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
	struct sge_uld_txq *txq;
	int ret = 0;

	local_bh_disable();
	txq = &txq_info->uldtxq[idx];
	spin_lock(&txq->sendq.lock);
	if (txq->full)
		ret = -1;
	spin_unlock(&txq->sendq.lock);
	local_bh_enable();
	return ret;
}

static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
			       struct _key_ctx *key_ctx)
{
	if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
		memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
	} else {
		memcpy(key_ctx->key,
		       ablkctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->enckey_len >> 1);
		memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->rrkey, ablkctx->enckey_len >> 1);
	}
	return 0;
}

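/*
 * Work out how many source (and, for chcr_sg_ent_in_wr, matching
 * destination) bytes can be carried by one work request given @space
 * bytes of headroom, using the sgl_ent_len/dsgl_ent_len tables to
 * account for SGL overhead.
 */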
static int chcr_hash_ent_in_wr(struct scatterlist *src,
			       unsigned int minsg,
			       unsigned int space,
			       unsigned int srcskip)
{
	int srclen = 0;
	int srcsg = minsg;
	int soffset = 0, sless;

	if (sg_dma_len(src) == srcskip) {
		src = sg_next(src);
		srcskip = 0;
	}
	while (src && space > (sgl_ent_len[srcsg + 1])) {
		sless = min_t(unsigned int, sg_dma_len(src) - soffset - srcskip,
			      CHCR_SRC_SG_SIZE);
		srclen += sless;
		soffset += sless;
		srcsg++;
		if (sg_dma_len(src) == (soffset + srcskip)) {
			src = sg_next(src);
			soffset = 0;
			srcskip = 0;
		}
	}
	return srclen;
}

static int chcr_sg_ent_in_wr(struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int minsg,
			     unsigned int space,
			     unsigned int srcskip,
			     unsigned int dstskip)
{
	int srclen = 0, dstlen = 0;
	int srcsg = minsg, dstsg = minsg;
	int offset = 0, soffset = 0, less, sless = 0;

	if (sg_dma_len(src) == srcskip) {
		src = sg_next(src);
		srcskip = 0;
	}
	if (sg_dma_len(dst) == dstskip) {
		dst = sg_next(dst);
		dstskip = 0;
	}

	while (src && dst &&
	       space > (sgl_ent_len[srcsg + 1] + dsgl_ent_len[dstsg])) {
		sless = min_t(unsigned int, sg_dma_len(src) - srcskip - soffset,
			      CHCR_SRC_SG_SIZE);
		srclen += sless;
		srcsg++;
		offset = 0;
		while (dst && ((dstsg + 1) <= MAX_DSGL_ENT) &&
		       space > (sgl_ent_len[srcsg] + dsgl_ent_len[dstsg + 1])) {
			if (srclen <= dstlen)
				break;
			less = min_t(unsigned int, sg_dma_len(dst) - offset -
				     dstskip, CHCR_DST_SG_SIZE);
			dstlen += less;
			offset += less;
			if ((offset + dstskip) == sg_dma_len(dst)) {
				dst = sg_next(dst);
				offset = 0;
			}
			dstsg++;
			dstskip = 0;
		}
		soffset += sless;
		if ((soffset + srcskip) == sg_dma_len(src)) {
			src = sg_next(src);
			srcskip = 0;
			soffset = 0;
		}
	}
	return min(srclen, dstlen);
}

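/*
 * Hand the request to the software skcipher fallback, used when the
 * hardware path cannot make progress (for example when no bytes can
 * be scheduled into a work request).
 */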
static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
				u32 flags,
				struct scatterlist *src,
				struct scatterlist *dst,
				unsigned int nbytes,
				u8 *iv,
				unsigned short op_type)
{
	int err;

	SKCIPHER_REQUEST_ON_STACK(subreq, cipher);

	skcipher_request_set_tfm(subreq, cipher);
	skcipher_request_set_callback(subreq, flags, NULL, NULL);
	skcipher_request_set_crypt(subreq, src, dst,
				   nbytes, iv);

	err = op_type ? crypto_skcipher_decrypt(subreq) :
		crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	return err;
}

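/*
 * Fill the fields common to all crypto work requests: opcode and
 * context size, length in 16-byte units, completion cookie, response
 * queue, and the immediate-data indication for the ULPTX sub-command.
 */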
static inline void create_wreq(struct chcr_context *ctx,
			       struct chcr_wr *chcr_req,
			       struct crypto_async_request *req,
			       unsigned int imm,
			       int hash_sz,
			       unsigned int len16,
			       unsigned int sc_len,
			       unsigned int lcb)
{
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	int qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx];

	chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE;
	chcr_req->wreq.pld_size_hash_size =
		htonl(FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
	chcr_req->wreq.len16_pkd =
		htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(len16, 16)));
	chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
	chcr_req->wreq.rx_chid_to_rx_q_id =
		FILL_WR_RX_Q_ID(ctx->dev->rx_channel_id, qid,
				!!lcb, ctx->tx_qidx);

	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(ctx->tx_chan_id,
						       qid);
	chcr_req->ulptx.len = htonl((DIV_ROUND_UP(len16, 16) -
				     ((sizeof(chcr_req->wreq)) >> 4)));

	chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(!imm);
	chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
					   sizeof(chcr_req->key_ctx) + sc_len);
}

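/**
 *	create_cipher_wr - form the WR for cipher operations
 *	@wrparam: cipher request, ingress qid and the number of bytes to
 *		  be handled by this work request
 */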
static struct sk_buff *create_cipher_wr(struct cipher_wr_param *wrparam)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(wrparam->req);
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct ulptx_sgl *ulptx;
	struct chcr_blkcipher_req_ctx *reqctx =
		ablkcipher_request_ctx(wrparam->req);
	unsigned int temp = 0, transhdr_len, dst_size;
	int error;
	int nents;
	unsigned int kctx_len;
	gfp_t flags = wrparam->req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
			GFP_KERNEL : GFP_ATOMIC;
	struct adapter *adap = padap(c_ctx(tfm)->dev);

	nents = sg_nents_xlen(reqctx->dstsg, wrparam->bytes, CHCR_DST_SG_SIZE,
			      reqctx->dst_ofst);
	dst_size = get_space_for_phys_dsgl(nents);
	kctx_len = roundup(ablkctx->enckey_len, 16);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	nents = sg_nents_xlen(reqctx->srcsg, wrparam->bytes,
			      CHCR_SRC_SG_SIZE, reqctx->src_ofst);
	temp = reqctx->imm ? roundup(wrparam->bytes, 16) :
				(sgl_len(nents) * 8);
	transhdr_len += temp;
	transhdr_len = roundup(transhdr_len, 16);
	skb = alloc_skb(SGE_MAX_WR_LEN, flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}
	chcr_req = __skb_put_zero(skb, transhdr_len);
	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(c_ctx(tfm)->dev->rx_channel_id, 2, 1);

	chcr_req->sec_cpl.pldlen = htonl(IV + wrparam->bytes);
	chcr_req->sec_cpl.aadstart_cipherstop_hi =
		FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, IV + 1, 0);

	chcr_req->sec_cpl.cipherstop_lo_authinsert =
		FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, 0,
							 ablkctx->ciph_mode,
							 0, 0, IV >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
							 0, 1, dst_size);

	chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
	if ((reqctx->op == CHCR_DECRYPT_OP) &&
	    (!(get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	       CRYPTO_ALG_SUB_TYPE_CTR)) &&
	    (!(get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	       CRYPTO_ALG_SUB_TYPE_CTR_RFC3686))) {
		generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
	} else {
		if ((ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) ||
		    (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CTR)) {
			memcpy(chcr_req->key_ctx.key, ablkctx->key,
			       ablkctx->enckey_len);
		} else {
			memcpy(chcr_req->key_ctx.key, ablkctx->key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->enckey_len >> 1);
			memcpy(chcr_req->key_ctx.key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->key,
			       ablkctx->enckey_len >> 1);
		}
	}
	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	ulptx = (struct ulptx_sgl *)((u8 *)(phys_cpl + 1) + dst_size);
	chcr_add_cipher_src_ent(wrparam->req, ulptx, wrparam);
	chcr_add_cipher_dst_ent(wrparam->req, phys_cpl, wrparam, wrparam->qid);

	atomic_inc(&adap->chcr_stats.cipher_rqst);
	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + kctx_len + IV
		+ (reqctx->imm ? (wrparam->bytes) : 0);
	create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0,
		    transhdr_len, temp,
		    ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC);
	reqctx->skb = skb;

	if (reqctx->op && (ablkctx->ciph_mode ==
			   CHCR_SCMD_CIPHER_MODE_AES_CBC))
		sg_pcopy_to_buffer(wrparam->req->src,
			sg_nents(wrparam->req->src), wrparam->req->info, 16,
			reqctx->processed + wrparam->bytes - AES_BLOCK_SIZE);

	return skb;
err:
	return ERR_PTR(error);
}

static inline int chcr_keyctx_ck_size(unsigned int keylen)
{
	int ck_size = 0;

	if (keylen == AES_KEYSIZE_128)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	else if (keylen == AES_KEYSIZE_256)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	else
		ck_size = 0;

	return ck_size;
}

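/*
 * Program the key into the software fallback skcipher, mirroring the
 * request flags both ways so failures are reported like a native
 * setkey.
 */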
static int chcr_cipher_fallback_setkey(struct crypto_ablkcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	int err = 0;

	crypto_skcipher_clear_flags(ablkctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ablkctx->sw_cipher, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |=
		crypto_skcipher_get_flags(ablkctx->sw_cipher) &
		CRYPTO_TFM_RES_MASK;
	return err;
}

static int chcr_aes_cbc_setkey(struct crypto_ablkcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192 ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_ctr_setkey(struct crypto_ablkcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;
	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_rfc3686_setkey(struct crypto_ablkcipher *cipher,
				   const u8 *key,
				   unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;
	memcpy(ablkctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;
	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
	ablkctx->enckey_len = 0;

	return err;
}

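/*
 * ctr_add_iv() adds @add to the 128-bit big-endian counter in @srciv,
 * propagating carries, and writes the result to @dstiv.
 * adjust_ctr_overflow() clamps @bytes so the low 32-bit counter word
 * cannot wrap within a single work request.
 */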
static void ctr_add_iv(u8 *dstiv, u8 *srciv, u32 add)
{
	unsigned int size = AES_BLOCK_SIZE;
	__be32 *b = (__be32 *)(dstiv + size);
	u32 c, prev;

	memcpy(dstiv, srciv, AES_BLOCK_SIZE);
	for (; size >= 4; size -= 4) {
		prev = be32_to_cpu(*--b);
		c = prev + add;
		*b = cpu_to_be32(c);
		if (prev < c)
			break;
		add = 1;
	}
}

static unsigned int adjust_ctr_overflow(u8 *iv, u32 bytes)
{
	__be32 *b = (__be32 *)(iv + AES_BLOCK_SIZE);
	u64 c;
	u32 temp = be32_to_cpu(*--b);

	temp = ~temp;
	c = (u64)temp + 1;
	if ((bytes / AES_BLOCK_SIZE) > c)
		bytes = c * AES_BLOCK_SIZE;
	return bytes;
}

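/*
 * Recompute the XTS tweak after last_req_len bytes: encrypt the
 * original IV with the second key half, then apply one GF(2^128)
 * doubling per processed block (batched eight at a time).  For an
 * intermediate work request the result is decrypted again, since the
 * hardware takes the pre-encryption tweak.
 */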
static int chcr_update_tweak(struct ablkcipher_request *req, u8 *iv,
			     u32 isfinal)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct crypto_cipher *cipher;
	int ret, i;
	u8 *key;
	unsigned int keylen;
	int round = reqctx->last_req_len / AES_BLOCK_SIZE;
	int round8 = round / 8;

	cipher = ablkctx->aes_generic;
	memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);

	keylen = ablkctx->enckey_len / 2;
	key = ablkctx->key + keylen;
	ret = crypto_cipher_setkey(cipher, key, keylen);
	if (ret)
		goto out;
	crypto_cipher_encrypt_one(cipher, iv, iv);
	for (i = 0; i < round8; i++)
		gf128mul_x8_ble((le128 *)iv, (le128 *)iv);

	for (i = 0; i < (round % 8); i++)
		gf128mul_x_ble((le128 *)iv, (le128 *)iv);

	if (!isfinal)
		crypto_cipher_decrypt_one(cipher, iv, iv);
out:
	return ret;
}

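/*
 * Derive the IV for the next work request of a partially processed
 * cipher: advance the CTR counter, reload the RFC3686 counter word,
 * recompute the XTS tweak, or pick up the last CBC ciphertext block.
 */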
static int chcr_update_cipher_iv(struct ablkcipher_request *req,
				 struct cpl_fw6_pld *fw6_pld, u8 *iv)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	int subtype = get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm));
	int ret = 0;

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		ctr_add_iv(iv, req->info, (reqctx->processed /
					   AES_BLOCK_SIZE));
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_RFC3686)
		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
			    CTR_RFC3686_IV_SIZE) = cpu_to_be32((reqctx->processed /
								AES_BLOCK_SIZE) + 1);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
		ret = chcr_update_tweak(req, iv, 0);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
		if (reqctx->op)
			memcpy(iv, req->info, AES_BLOCK_SIZE);
		else
			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
	}

	return ret;
}

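/*
 * Write back the final IV the ablkcipher API expects once the whole
 * request has been processed.
 */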
static int chcr_final_cipher_iv(struct ablkcipher_request *req,
				struct cpl_fw6_pld *fw6_pld, u8 *iv)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	int subtype = get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm));
	int ret = 0;

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		ctr_add_iv(iv, req->info, (reqctx->processed /
					   AES_BLOCK_SIZE));
	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
		ret = chcr_update_tweak(req, iv, 1);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
		if (!reqctx->op)
			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
	}
	return ret;
}

static int chcr_handle_cipher_resp(struct ablkcipher_request *req,
				   unsigned char *input, int err)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct sk_buff *skb;
	struct cpl_fw6_pld *fw6_pld = (struct cpl_fw6_pld *)input;
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct cipher_wr_param wrparam;
	int bytes;

	if (err)
		goto unmap;
	if (req->nbytes == reqctx->processed) {
		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
				      req);
		err = chcr_final_cipher_iv(req, fw6_pld, req->info);
		goto complete;
	}

	if (!reqctx->imm) {
		bytes = chcr_sg_ent_in_wr(reqctx->srcsg, reqctx->dstsg, 0,
					  CIP_SPACE_LEFT(ablkctx->enckey_len),
					  reqctx->src_ofst, reqctx->dst_ofst);
		if ((bytes + reqctx->processed) >= req->nbytes)
			bytes = req->nbytes - reqctx->processed;
		else
			bytes = rounddown(bytes, 16);
	} else {
		bytes = req->nbytes - reqctx->processed;
	}
	err = chcr_update_cipher_iv(req, fw6_pld, reqctx->iv);
	if (err)
		goto unmap;

	if (unlikely(bytes == 0)) {
		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
				      req);
		err = chcr_cipher_fallback(ablkctx->sw_cipher,
					   req->base.flags,
					   req->src,
					   req->dst,
					   req->nbytes,
					   req->info,
					   reqctx->op);
		goto complete;
	}

	if (get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	    CRYPTO_ALG_SUB_TYPE_CTR)
		bytes = adjust_ctr_overflow(reqctx->iv, bytes);
	wrparam.qid = u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx];
	wrparam.req = req;
	wrparam.bytes = bytes;
	skb = create_cipher_wr(&wrparam);
	if (IS_ERR(skb)) {
		pr_err("chcr : %s : Failed to form WR. No memory\n", __func__);
		err = PTR_ERR(skb);
		goto unmap;
	}
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, c_ctx(tfm)->tx_qidx);
	chcr_send_wr(skb);
	reqctx->last_req_len = bytes;
	reqctx->processed += bytes;
	return 0;
unmap:
	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
complete:
	req->base.complete(&req->base, err);
	return err;
}

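/*
 * Validate a cipher request, DMA-map it and build its first work
 * request.  Anything that does not fit is continued from
 * chcr_handle_cipher_resp() when the completion arrives.
 */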
static int process_cipher(struct ablkcipher_request *req,
			  unsigned short qid,
			  struct sk_buff **skb,
			  unsigned short op_type)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
	struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct cipher_wr_param wrparam;
	int bytes, err = -EINVAL;

	reqctx->processed = 0;
	if (!req->info)
		goto error;
	if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
	    (req->nbytes == 0) ||
	    (req->nbytes % crypto_ablkcipher_blocksize(tfm))) {
		pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
		       ablkctx->enckey_len, req->nbytes, ivsize);
		goto error;
	}
	chcr_cipher_dma_map(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
	if (req->nbytes < (SGE_MAX_WR_LEN - (sizeof(struct chcr_wr) +
					     AES_MIN_KEY_SIZE +
					     sizeof(struct cpl_rx_phys_dsgl) +
					     32))) {
		unsigned int dnents = 0, transhdr_len, phys_dsgl, kctx_len;

		dnents = sg_nents_xlen(req->dst, req->nbytes,
				       CHCR_DST_SG_SIZE, 0);
		phys_dsgl = get_space_for_phys_dsgl(dnents);
		kctx_len = roundup(ablkctx->enckey_len, 16);
		transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
		reqctx->imm = (transhdr_len + IV + req->nbytes) <=
			SGE_MAX_WR_LEN;
		bytes = IV + req->nbytes;
	} else {
		reqctx->imm = 0;
	}

	if (!reqctx->imm) {
		bytes = chcr_sg_ent_in_wr(req->src, req->dst, 0,
					  CIP_SPACE_LEFT(ablkctx->enckey_len),
					  0, 0);
		if ((bytes + reqctx->processed) >= req->nbytes)
			bytes = req->nbytes - reqctx->processed;
		else
			bytes = rounddown(bytes, 16);
	} else {
		bytes = req->nbytes;
	}
	if (get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	    CRYPTO_ALG_SUB_TYPE_CTR) {
		bytes = adjust_ctr_overflow(req->info, bytes);
	}
	if (get_cryptoalg_subtype(crypto_ablkcipher_tfm(tfm)) ==
	    CRYPTO_ALG_SUB_TYPE_CTR_RFC3686) {
		memcpy(reqctx->iv, ablkctx->nonce, CTR_RFC3686_NONCE_SIZE);
		memcpy(reqctx->iv + CTR_RFC3686_NONCE_SIZE, req->info,
		       CTR_RFC3686_IV_SIZE);

		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
			    CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
	} else {
		memcpy(reqctx->iv, req->info, IV);
	}
	if (unlikely(bytes == 0)) {
		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
				      req);
		err = chcr_cipher_fallback(ablkctx->sw_cipher,
					   req->base.flags,
					   req->src,
					   req->dst,
					   req->nbytes,
					   reqctx->iv,
					   op_type);
		goto error;
	}
	reqctx->op = op_type;
	reqctx->srcsg = req->src;
	reqctx->dstsg = req->dst;
	reqctx->src_ofst = 0;
	reqctx->dst_ofst = 0;
	wrparam.qid = qid;
	wrparam.req = req;
	wrparam.bytes = bytes;
	*skb = create_cipher_wr(&wrparam);
	if (IS_ERR(*skb)) {
		err = PTR_ERR(*skb);
		goto unmap;
	}
	reqctx->processed = bytes;
	reqctx->last_req_len = bytes;

	return 0;
unmap:
	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
error:
	return err;
}

static int chcr_aes_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct sk_buff *skb = NULL;
	int err, isfull = 0;
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    c_ctx(tfm)->tx_qidx))) {
		isfull = 1;
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -ENOSPC;
	}

	err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx],
			     &skb, CHCR_ENCRYPT_OP);
	if (err || !skb)
		return err;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, c_ctx(tfm)->tx_qidx);
	chcr_send_wr(skb);
	return isfull ? -EBUSY : -EINPROGRESS;
}

static int chcr_aes_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct sk_buff *skb = NULL;
	int err, isfull = 0;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    c_ctx(tfm)->tx_qidx))) {
		isfull = 1;
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -ENOSPC;
	}

	err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx],
			     &skb, CHCR_DECRYPT_OP);
	if (err || !skb)
		return err;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, c_ctx(tfm)->tx_qidx);
	chcr_send_wr(skb);
	return isfull ? -EBUSY : -EINPROGRESS;
}

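/*
 * On first use, bind the context to a chcr device and derive its rx/tx
 * queue indices, alternating channels between contexts and spreading
 * queues within a channel by CPU.
 */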
static int chcr_device_init(struct chcr_context *ctx)
{
	struct uld_ctx *u_ctx = NULL;
	struct adapter *adap;
	unsigned int id;
	int txq_perchan, txq_idx, ntxq;
	int err = 0, rxq_perchan, rxq_idx;

	id = smp_processor_id();
	if (!ctx->dev) {
		u_ctx = assign_chcr_device();
		if (!u_ctx) {
			pr_err("chcr device assignment fails\n");
			goto out;
		}
		ctx->dev = u_ctx->dev;
		adap = padap(ctx->dev);
		ntxq = min_not_zero((unsigned int)u_ctx->lldi.nrxq,
				    adap->vres.ncrypto_fc);
		rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
		txq_perchan = ntxq / u_ctx->lldi.nchan;
		spin_lock(&ctx->dev->lock_chcr_dev);
		ctx->tx_chan_id = ctx->dev->tx_channel_id;
		ctx->dev->tx_channel_id = !ctx->dev->tx_channel_id;
		ctx->dev->rx_channel_id = 0;
		spin_unlock(&ctx->dev->lock_chcr_dev);
		rxq_idx = ctx->tx_chan_id * rxq_perchan;
		rxq_idx += id % rxq_perchan;
		txq_idx = ctx->tx_chan_id * txq_perchan;
		txq_idx += id % txq_perchan;
		ctx->rx_qidx = rxq_idx;
		ctx->tx_qidx = txq_idx;
		ctx->pci_chan_id = txq_idx / txq_perchan;
	}
out:
	return err;
}

static int chcr_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->cra_name, 0,
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ablkctx->sw_cipher)) {
		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
		return PTR_ERR(ablkctx->sw_cipher);
	}

	if (get_cryptoalg_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_XTS) {
		ablkctx->aes_generic = crypto_alloc_cipher("aes-generic", 0, 0);
		if (IS_ERR(ablkctx->aes_generic)) {
			pr_err("failed to allocate aes cipher for tweak\n");
			return PTR_ERR(ablkctx->aes_generic);
		}
	} else
		ablkctx->aes_generic = NULL;

	tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static int chcr_rfc3686_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ablkctx->sw_cipher)) {
		pr_err("failed to allocate fallback for %s\n", alg->cra_name);
		return PTR_ERR(ablkctx->sw_cipher);
	}
	tfm->crt_ablkcipher.reqsize = sizeof(struct chcr_blkcipher_req_ctx);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static void chcr_cra_exit(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	crypto_free_skcipher(ablkctx->sw_cipher);
	if (ablkctx->aes_generic)
		crypto_free_cipher(ablkctx->aes_generic);
}

static int get_alg_config(struct algo_param *params,
			  unsigned int auth_size)
{
	switch (auth_size) {
	case SHA1_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
		params->result_size = SHA1_DIGEST_SIZE;
		break;
	case SHA224_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA256_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA384_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	case SHA512_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	default:
		pr_err("chcr : ERROR, unsupported digest size\n");
		return -EINVAL;
	}
	return 0;
}

static inline void chcr_free_shash(struct crypto_shash *base_hash)
{
	crypto_free_shash(base_hash);
}

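/**
 *	create_hash_wr - Create hash work request
 *	@req: ahash request
 *	@param: lengths, partial-hash sizes and first/last flags for this
 *		pass over the data
 */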
static struct sk_buff *create_hash_wr(struct ahash_request *req,
				      struct hash_wr_param *param)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(tfm));
	struct sk_buff *skb = NULL;
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
	struct chcr_wr *chcr_req;
	struct ulptx_sgl *ulptx;
	unsigned int nents = 0, transhdr_len;
	unsigned int temp = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(h_ctx(tfm)->dev);
	int error = 0;

	transhdr_len = HASH_TRANSHDR_SIZE(param->kctx_len);
	req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len +
				param->sg_len) <= SGE_MAX_WR_LEN;
	nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len,
			      CHCR_SRC_SG_SIZE, req_ctx->hctx_wr.src_ofst);
	nents += param->bfr_len ? 1 : 0;
	transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len +
				param->sg_len, 16) : (sgl_len(nents) * 8);
	transhdr_len = roundup(transhdr_len, 16);

	skb = alloc_skb(transhdr_len, flags);
	if (!skb)
		return ERR_PTR(-ENOMEM);
	chcr_req = __skb_put_zero(skb, transhdr_len);

	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(h_ctx(tfm)->dev->rx_channel_id, 2, 0);
	chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);

	chcr_req->sec_cpl.aadstart_cipherstop_hi =
		FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
	chcr_req->sec_cpl.cipherstop_lo_authinsert =
		FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
	chcr_req->sec_cpl.seqno_numivs =
		FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
					 param->opad_needed, 0);

	chcr_req->sec_cpl.ivgen_hdrlen =
		FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);

	memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
	       param->alg_prm.result_size);

	if (param->opad_needed)
		memcpy(chcr_req->key_ctx.key +
		       ((param->alg_prm.result_size <= 32) ? 32 :
			CHCR_HASH_MAX_DIGEST_SIZE),
		       hmacctx->opad, param->alg_prm.result_size);

	chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
					param->alg_prm.mk_size, 0,
					param->opad_needed,
					((param->kctx_len +
					  sizeof(chcr_req->key_ctx)) >> 4));
	chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);
	ulptx = (struct ulptx_sgl *)((u8 *)(chcr_req + 1) + param->kctx_len +
				     DUMMY_BYTES);
	if (param->bfr_len != 0) {
		req_ctx->hctx_wr.dma_addr =
			dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
				       param->bfr_len, DMA_TO_DEVICE);
		if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
				      req_ctx->hctx_wr.dma_addr)) {
			error = -ENOMEM;
			goto err;
		}
		req_ctx->hctx_wr.dma_len = param->bfr_len;
	} else {
		req_ctx->hctx_wr.dma_addr = 0;
	}
	chcr_add_hash_src_ent(req, ulptx, param);

	temp = param->kctx_len + DUMMY_BYTES + (req_ctx->hctx_wr.imm ?
				(param->sg_len + param->bfr_len) : 0);
	atomic_inc(&adap->chcr_stats.digest_rqst);
	create_wreq(h_ctx(tfm), chcr_req, &req->base, req_ctx->hctx_wr.imm,
		    param->hash_size, transhdr_len,
		    temp, 0);
	req_ctx->hctx_wr.skb = skb;
	return skb;
err:
	kfree_skb(skb);
	return ERR_PTR(error);
}

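/*
 * Buffer sub-block-size residue in reqbfr and only hand multiples of
 * the block size to the hardware; leftover bytes are carried into the
 * next update.
 */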
static int chcr_ahash_update(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	u8 remainder = 0, bs;
	unsigned int nbytes = req->nbytes;
	struct hash_wr_param params;
	int error, isfull = 0;

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	u_ctx = ULD_CTX(h_ctx(rtfm));
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    h_ctx(rtfm)->tx_qidx))) {
		isfull = 1;
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -ENOSPC;
	}

	if (nbytes + req_ctx->reqlen >= bs) {
		remainder = (nbytes + req_ctx->reqlen) % bs;
		nbytes = nbytes + req_ctx->reqlen - remainder;
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), req_ctx->reqbfr
				   + req_ctx->reqlen, nbytes, 0);
		req_ctx->reqlen += nbytes;
		return 0;
	}
	chcr_init_hctx_per_wr(req_ctx);
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error)
		return -ENOMEM;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
					    HASH_SPACE_LEFT(params.kctx_len), 0);
	if (params.sg_len > req->nbytes)
		params.sg_len = req->nbytes;
	params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs) -
			req_ctx->reqlen;
	params.opad_needed = 0;
	params.more = 1;
	params.last = 0;
	params.bfr_len = req_ctx->reqlen;
	params.scmd1 = 0;
	req_ctx->hctx_wr.srcsg = req->src;

	params.hash_size = params.alg_prm.result_size;
	req_ctx->data_len += params.sg_len + params.bfr_len;
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}

	req_ctx->hctx_wr.processed += params.sg_len;
	if (remainder) {
		swap(req_ctx->reqbfr, req_ctx->skbfr);
		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
				   req_ctx->reqbfr, remainder, req->nbytes -
				   remainder);
	}
	req_ctx->reqlen = remainder;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, h_ctx(rtfm)->tx_qidx);
	chcr_send_wr(skb);

	return isfull ? -EBUSY : -EINPROGRESS;
unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
	return error;
}

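/*
 * Hand-build a final padding block: the 0x80 terminator plus the total
 * message length in bits, for requests that end with no buffered data.
 */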
static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
{
	memset(bfr_ptr, 0, bs);
	*bfr_ptr = 0x80;
	if (bs == 64)
		*(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1 << 3);
	else
		*(__be64 *)(bfr_ptr + 120) = cpu_to_be64(scmd1 << 3);
}

static int chcr_ahash_final(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct hash_wr_param params;
	struct sk_buff *skb;
	struct uld_ctx *u_ctx = NULL;
	u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	chcr_init_hctx_per_wr(req_ctx);
	u_ctx = ULD_CTX(h_ctx(rtfm));
	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;
	params.sg_len = 0;
	req_ctx->hctx_wr.isfinal = 1;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.opad_needed = 1;
		params.kctx_len *= 2;
	} else {
		params.opad_needed = 0;
	}

	req_ctx->hctx_wr.result = 1;
	params.bfr_len = req_ctx->reqlen;
	req_ctx->data_len += params.bfr_len + params.sg_len;
	req_ctx->hctx_wr.srcsg = req->src;
	if (req_ctx->reqlen == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	} else {
		params.scmd1 = req_ctx->data_len;
		params.last = 1;
		params.more = 0;
	}
	params.hash_size = crypto_ahash_digestsize(rtfm);
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb))
		return PTR_ERR(skb);
	req_ctx->reqlen = 0;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, h_ctx(rtfm)->tx_qidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static int chcr_ahash_finup(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;
	int error, isfull = 0;

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	u_ctx = ULD_CTX(h_ctx(rtfm));

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    h_ctx(rtfm)->tx_qidx))) {
		isfull = 1;
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -ENOSPC;
	}
	chcr_init_hctx_per_wr(req_ctx);
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error)
		return -ENOMEM;

	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.kctx_len *= 2;
		params.opad_needed = 1;
	} else {
		params.opad_needed = 0;
	}

	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
					    HASH_SPACE_LEFT(params.kctx_len), 0);
	if (params.sg_len < req->nbytes) {
		if (is_hmac(crypto_ahash_tfm(rtfm))) {
			params.kctx_len /= 2;
			params.opad_needed = 0;
		}
		params.last = 0;
		params.more = 1;
		params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs)
				- req_ctx->reqlen;
		params.hash_size = params.alg_prm.result_size;
		params.scmd1 = 0;
	} else {
		params.last = 1;
		params.more = 0;
		params.sg_len = req->nbytes;
		params.hash_size = crypto_ahash_digestsize(rtfm);
		params.scmd1 = req_ctx->data_len + req_ctx->reqlen +
			       params.sg_len;
	}
	params.bfr_len = req_ctx->reqlen;
	req_ctx->data_len += params.bfr_len + params.sg_len;
	req_ctx->hctx_wr.result = 1;
	req_ctx->hctx_wr.srcsg = req->src;
	if ((req_ctx->reqlen + req->nbytes) == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	}
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}
	req_ctx->reqlen = 0;
	req_ctx->hctx_wr.processed += params.sg_len;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, h_ctx(rtfm)->tx_qidx);
	chcr_send_wr(skb);

	return isfull ? -EBUSY : -EINPROGRESS;
unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
	return error;
}

static int chcr_ahash_digest(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;
	int error, isfull = 0;

	rtfm->init(req);
	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	u_ctx = ULD_CTX(h_ctx(rtfm));
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    h_ctx(rtfm)->tx_qidx))) {
		isfull = 1;
		if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			return -ENOSPC;
	}

	chcr_init_hctx_per_wr(req_ctx);
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error)
		return -ENOMEM;

	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.kctx_len *= 2;
		params.opad_needed = 1;
	} else {
		params.opad_needed = 0;
	}
	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
					    HASH_SPACE_LEFT(params.kctx_len), 0);
	if (params.sg_len < req->nbytes) {
		if (is_hmac(crypto_ahash_tfm(rtfm))) {
			params.kctx_len /= 2;
			params.opad_needed = 0;
		}
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.sg_len = rounddown(params.sg_len, bs);
		params.hash_size = params.alg_prm.result_size;
	} else {
		params.sg_len = req->nbytes;
		params.hash_size = crypto_ahash_digestsize(rtfm);
		params.last = 1;
		params.more = 0;
		params.scmd1 = req->nbytes + req_ctx->data_len;
	}
	params.bfr_len = 0;
	req_ctx->hctx_wr.result = 1;
	req_ctx->hctx_wr.srcsg = req->src;
	req_ctx->data_len += params.bfr_len + params.sg_len;

	if (req->nbytes == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, 0);
		params.more = 1;
		params.bfr_len = bs;
	}

	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}
	req_ctx->hctx_wr.processed += params.sg_len;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, h_ctx(rtfm)->tx_qidx);
	chcr_send_wr(skb);
	return isfull ? -EBUSY : -EINPROGRESS;
unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
	return error;
}

static int chcr_ahash_continue(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
	struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = NULL;
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;
	int error;

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	u_ctx = ULD_CTX(h_ctx(rtfm));
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.kctx_len *= 2;
		params.opad_needed = 1;
	} else {
		params.opad_needed = 0;
	}
	params.sg_len = chcr_hash_ent_in_wr(hctx_wr->srcsg, 0,
					    HASH_SPACE_LEFT(params.kctx_len),
					    hctx_wr->src_ofst);
	if ((params.sg_len + hctx_wr->processed) > req->nbytes)
		params.sg_len = req->nbytes - hctx_wr->processed;
	if (!hctx_wr->result ||
	    ((params.sg_len + hctx_wr->processed) < req->nbytes)) {
		if (is_hmac(crypto_ahash_tfm(rtfm))) {
			params.kctx_len /= 2;
			params.opad_needed = 0;
		}
		params.last = 0;
		params.more = 1;
		params.sg_len = rounddown(params.sg_len, bs);
		params.hash_size = params.alg_prm.result_size;
		params.scmd1 = 0;
	} else {
		params.last = 1;
		params.more = 0;
		params.hash_size = crypto_ahash_digestsize(rtfm);
		params.scmd1 = reqctx->data_len + params.sg_len;
	}
	params.bfr_len = 0;
	reqctx->data_len += params.sg_len;
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto err;
	}
	hctx_wr->processed += params.sg_len;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, h_ctx(rtfm)->tx_qidx);
	chcr_send_wr(skb);
	return 0;
err:
	return error;
}

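/*
 * Completion handler for hash WRs: copy out the final digest or save
 * the updated partial hash, and issue the next WR while input remains.
 */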
1919static inline void chcr_handle_ahash_resp(struct ahash_request *req,
1920 unsigned char *input,
1921 int err)
1922{
1923 struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
1924 struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;
1925 int digestsize, updated_digestsize;
1926 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1927 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
1928
1929 if (input == NULL)
1930 goto out;
1931 digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(req));
1932 updated_digestsize = digestsize;
1933 if (digestsize == SHA224_DIGEST_SIZE)
1934 updated_digestsize = SHA256_DIGEST_SIZE;
1935 else if (digestsize == SHA384_DIGEST_SIZE)
1936 updated_digestsize = SHA512_DIGEST_SIZE;
1937
1938 if (hctx_wr->dma_addr) {
1939 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
1940 hctx_wr->dma_len, DMA_TO_DEVICE);
1941 hctx_wr->dma_addr = 0;
1942 }
1943 if (hctx_wr->isfinal || ((hctx_wr->processed + reqctx->reqlen) ==
1944 req->nbytes)) {
1945 if (hctx_wr->result == 1) {
1946 hctx_wr->result = 0;
1947 memcpy(req->result, input + sizeof(struct cpl_fw6_pld),
1948 digestsize);
1949 } else {
1950 memcpy(reqctx->partial_hash,
1951 input + sizeof(struct cpl_fw6_pld),
1952 updated_digestsize);
1954 }
1955 goto unmap;
1956 }
1957 memcpy(reqctx->partial_hash, input + sizeof(struct cpl_fw6_pld),
1958 updated_digestsize);
1959
1960 err = chcr_ahash_continue(req);
1961 if (err)
1962 goto unmap;
1963 return;
1964unmap:
1965 if (hctx_wr->is_sg_map)
1966 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
1967 
1969out:
1970 req->base.complete(&req->base, err);
1971}
1972
1973 /*
1974  * chcr_handle_resp - dispatch a hardware completion to the AEAD, cipher
1975  * or hash handler according to the algorithm type of the request.
1976  */
1977int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
1978 int err)
1979{
1980 struct crypto_tfm *tfm = req->tfm;
1981 struct chcr_context *ctx = crypto_tfm_ctx(tfm);
1982 struct adapter *adap = padap(ctx->dev);
1983
1984 switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
1985 case CRYPTO_ALG_TYPE_AEAD:
1986 chcr_handle_aead_resp(aead_request_cast(req), input, err);
1987 break;
1988
1989 case CRYPTO_ALG_TYPE_ABLKCIPHER:
1990 err = chcr_handle_cipher_resp(ablkcipher_request_cast(req),
1991 input, err);
1992 break;
1993
1994 case CRYPTO_ALG_TYPE_AHASH:
1995 chcr_handle_ahash_resp(ahash_request_cast(req), input, err);
1996 }
1997 atomic_inc(&adap->chcr_stats.complete);
1998 return err;
1999}
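
/*
 * Export/import the ahash state. Only the buffered data, the running
 * length and the partial hash are carried across; the per-WR
 * bookkeeping is reset on both paths.
 */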
2000static int chcr_ahash_export(struct ahash_request *areq, void *out)
2001{
2002 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2003 struct chcr_ahash_req_ctx *state = out;
2004
2005 state->reqlen = req_ctx->reqlen;
2006 state->data_len = req_ctx->data_len;
2007 memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
2008 memcpy(state->partial_hash, req_ctx->partial_hash,
2009 CHCR_HASH_MAX_DIGEST_SIZE);
2010 chcr_init_hctx_per_wr(state);
2011 return 0;
2012}
2013
2014static int chcr_ahash_import(struct ahash_request *areq, const void *in)
2015{
2016 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2017 struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;
2018
2019 req_ctx->reqlen = state->reqlen;
2020 req_ctx->data_len = state->data_len;
2021 req_ctx->reqbfr = req_ctx->bfr1;
2022 req_ctx->skbfr = req_ctx->bfr2;
2023 memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
2024 memcpy(req_ctx->partial_hash, state->partial_hash,
2025 CHCR_HASH_MAX_DIGEST_SIZE);
2026 chcr_init_hctx_per_wr(req_ctx);
2027 return 0;
2028}
2029
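/*
 * chcr_ahash_setkey - precompute the HMAC ipad and opad partial hashes
 * from the key so the hardware can continue from them instead of
 * hashing the padded key blocks on every request.
 */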
2030static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2031 unsigned int keylen)
2032{
2033 struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(tfm));
2034 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2035 unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2036 unsigned int i, err = 0, updated_digestsize;
2037
2038 SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);
2039
2040 	/* Use the key to compute the ipad and opad. The ipad is sent with
2041 	 * the first request's data and the opad with the final hash result;
2042 	 * they are kept in hmacctx->ipad and hmacctx->opad respectively.
2043 	 */
2044 shash->tfm = hmacctx->base_hash;
2045 shash->flags = crypto_shash_get_flags(hmacctx->base_hash);
2046 if (keylen > bs) {
2047 err = crypto_shash_digest(shash, key, keylen,
2048 hmacctx->ipad);
2049 if (err)
2050 goto out;
2051 keylen = digestsize;
2052 } else {
2053 memcpy(hmacctx->ipad, key, keylen);
2054 }
2055 memset(hmacctx->ipad + keylen, 0, bs - keylen);
2056 memcpy(hmacctx->opad, hmacctx->ipad, bs);
2057
2058 for (i = 0; i < bs / sizeof(int); i++) {
2059 *((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
2060 *((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
2061 }
2062
2063 updated_digestsize = digestsize;
2064 if (digestsize == SHA224_DIGEST_SIZE)
2065 updated_digestsize = SHA256_DIGEST_SIZE;
2066 else if (digestsize == SHA384_DIGEST_SIZE)
2067 updated_digestsize = SHA512_DIGEST_SIZE;
2068 err = chcr_compute_partial_hash(shash, hmacctx->ipad,
2069 hmacctx->ipad, digestsize);
2070 if (err)
2071 goto out;
2072 chcr_change_order(hmacctx->ipad, updated_digestsize);
2073
2074 err = chcr_compute_partial_hash(shash, hmacctx->opad,
2075 hmacctx->opad, digestsize);
2076 if (err)
2077 goto out;
2078 chcr_change_order(hmacctx->opad, updated_digestsize);
2079out:
2080 return err;
2081}
2082
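/*
 * chcr_aes_xts_setkey - program an XTS key. key_len covers the
 * concatenated data and tweak keys, so a 32-byte key selects AES-128
 * in the key-context header.
 */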
2083static int chcr_aes_xts_setkey(struct crypto_ablkcipher *cipher, const u8 *key,
2084 unsigned int key_len)
2085{
2086 struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
2087 unsigned short context_size = 0;
2088 int err;
2089
2090 err = chcr_cipher_fallback_setkey(cipher, key, key_len);
2091 if (err)
2092 goto badkey_err;
2093
2094 memcpy(ablkctx->key, key, key_len);
2095 ablkctx->enckey_len = key_len;
2096 get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
2097 context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
2098 ablkctx->key_ctx_hdr =
2099 FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
2100 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
2101 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
2102 CHCR_KEYCTX_NO_KEY, 1,
2103 0, context_size);
2104 ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
2105 return 0;
2106badkey_err:
2107 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
2108 ablkctx->enckey_len = 0;
2109
2110 return err;
2111}
2112
2113static int chcr_sha_init(struct ahash_request *areq)
2114{
2115 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2116 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2117 int digestsize = crypto_ahash_digestsize(tfm);
2118
2119 req_ctx->data_len = 0;
2120 req_ctx->reqlen = 0;
2121 req_ctx->reqbfr = req_ctx->bfr1;
2122 req_ctx->skbfr = req_ctx->bfr2;
2123 copy_hash_init_values(req_ctx->partial_hash, digestsize);
2124
2125 return 0;
2126}
2127
2128static int chcr_sha_cra_init(struct crypto_tfm *tfm)
2129{
2130 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
2131 sizeof(struct chcr_ahash_req_ctx));
2132 return chcr_device_init(crypto_tfm_ctx(tfm));
2133}
2134
2135static int chcr_hmac_init(struct ahash_request *areq)
2136{
2137 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2138 struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
2139 struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(rtfm));
2140 unsigned int digestsize = crypto_ahash_digestsize(rtfm);
2141 unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
2142
2143 chcr_sha_init(areq);
2144 req_ctx->data_len = bs;
2145 if (is_hmac(crypto_ahash_tfm(rtfm))) {
2146 if (digestsize == SHA224_DIGEST_SIZE)
2147 memcpy(req_ctx->partial_hash, hmacctx->ipad,
2148 SHA256_DIGEST_SIZE);
2149 else if (digestsize == SHA384_DIGEST_SIZE)
2150 memcpy(req_ctx->partial_hash, hmacctx->ipad,
2151 SHA512_DIGEST_SIZE);
2152 else
2153 memcpy(req_ctx->partial_hash, hmacctx->ipad,
2154 digestsize);
2155 }
2156 return 0;
2157}
2158
2159static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
2160{
2161 struct chcr_context *ctx = crypto_tfm_ctx(tfm);
2162 struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
2163 unsigned int digestsize =
2164 crypto_ahash_digestsize(__crypto_ahash_cast(tfm));
2165
2166 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
2167 sizeof(struct chcr_ahash_req_ctx));
2168 hmacctx->base_hash = chcr_alloc_shash(digestsize);
2169 if (IS_ERR(hmacctx->base_hash))
2170 return PTR_ERR(hmacctx->base_hash);
2171 return chcr_device_init(crypto_tfm_ctx(tfm));
2172}
2173
2174static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
2175{
2176 struct chcr_context *ctx = crypto_tfm_ctx(tfm);
2177 struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
2178
2179 if (hmacctx->base_hash) {
2180 chcr_free_shash(hmacctx->base_hash);
2181 hmacctx->base_hash = NULL;
2182 }
2183}
2184
2185inline void chcr_aead_common_exit(struct aead_request *req)
2186{
2187 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2188 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2189 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
2190
2191 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
2192}
2193
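/*
 * chcr_aead_common_init - validate the request, DMA-map it and count
 * the source scatterlist entries covering the AAD and the payload.
 */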
2194static int chcr_aead_common_init(struct aead_request *req)
2195{
2196 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2197 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2198 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2199 unsigned int authsize = crypto_aead_authsize(tfm);
2200 int error = -EINVAL;
2201 
2203 if (aeadctx->enckey_len == 0)
2204 goto err;
2205 if (reqctx->op && req->cryptlen < authsize)
2206 goto err;
2207 if (reqctx->b0_len)
2208 reqctx->scratch_pad = reqctx->iv + IV;
2209 else
2210 reqctx->scratch_pad = NULL;
2211
2212 error = chcr_aead_dma_map(&ULD_CTX(a_ctx(tfm))->lldi.pdev->dev, req,
2213 reqctx->op);
2214 if (error) {
2215 error = -ENOMEM;
2216 goto err;
2217 }
2218 reqctx->aad_nents = sg_nents_xlen(req->src, req->assoclen,
2219 CHCR_SRC_SG_SIZE, 0);
2220 reqctx->src_nents = sg_nents_xlen(req->src, req->cryptlen,
2221 CHCR_SRC_SG_SIZE, req->assoclen);
2222 return 0;
2223err:
2224 return error;
2225}
2226
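/*
 * Returns true when a request cannot be carried in a single work
 * request (empty payload, too many destination SG entries, oversized
 * AAD or WR), in which case the software fallback cipher is used.
 */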
2227static int chcr_aead_need_fallback(struct aead_request *req, int dst_nents,
2228 int aadmax, int wrlen,
2229 unsigned short op_type)
2230{
2231 unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));
2232
2233 if (((req->cryptlen - (op_type ? authsize : 0)) == 0) ||
2234 dst_nents > MAX_DSGL_ENT ||
2235 (req->assoclen > aadmax) ||
2236 (wrlen > SGE_MAX_WR_LEN))
2237 return 1;
2238 return 0;
2239}
2240
2241static int chcr_aead_fallback(struct aead_request *req, unsigned short op_type)
2242{
2243 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2244 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2245 struct aead_request *subreq = aead_request_ctx(req);
2246
2247 aead_request_set_tfm(subreq, aeadctx->sw_cipher);
2248 aead_request_set_callback(subreq, req->base.flags,
2249 req->base.complete, req->base.data);
2250 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2251 req->iv);
2252 aead_request_set_ad(subreq, req->assoclen);
2253 return op_type ? crypto_aead_decrypt(subreq) :
2254 crypto_aead_encrypt(subreq);
2255}
2256
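/*
 * create_authenc_wr - build the work request for the authenc AEAD
 * modes: CBC/CTR ciphers paired with a SHA digest or the NULL cipher.
 */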
2257static struct sk_buff *create_authenc_wr(struct aead_request *req,
2258 unsigned short qid,
2259 int size)
2260{
2261 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2262 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2263 struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
2264 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2265 struct sk_buff *skb = NULL;
2266 struct chcr_wr *chcr_req;
2267 struct cpl_rx_phys_dsgl *phys_cpl;
2268 struct ulptx_sgl *ulptx;
2269 unsigned int transhdr_len;
2270 unsigned int dst_size = 0, temp, subtype = get_aead_subtype(tfm);
2271 unsigned int kctx_len = 0, dnents;
2272 unsigned int assoclen = req->assoclen;
2273 unsigned int authsize = crypto_aead_authsize(tfm);
2274 int error = -EINVAL;
2275 int null = 0;
2276 gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
2277 GFP_ATOMIC;
2278 struct adapter *adap = padap(a_ctx(tfm)->dev);
2279
2280 if (req->cryptlen == 0)
2281 return NULL;
2282
2283 reqctx->b0_len = 0;
2284 error = chcr_aead_common_init(req);
2285 if (error)
2286 return ERR_PTR(error);
2287
2288 if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL ||
2289 subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
2290 null = 1;
2291 assoclen = 0;
2292 reqctx->aad_nents = 0;
2293 }
2294 dnents = sg_nents_xlen(req->dst, assoclen, CHCR_DST_SG_SIZE, 0);
2295 dnents += sg_nents_xlen(req->dst, req->cryptlen +
2296 (reqctx->op ? -authsize : authsize), CHCR_DST_SG_SIZE,
2297 req->assoclen);
2298 dnents += MIN_AUTH_SG;
2299
2300 dst_size = get_space_for_phys_dsgl(dnents);
2301 kctx_len = (ntohl(KEY_CONTEXT_CTX_LEN_V(aeadctx->key_ctx_hdr)) << 4)
2302 - sizeof(chcr_req->key_ctx);
2303 transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
2304 reqctx->imm = (transhdr_len + assoclen + IV + req->cryptlen) <
2305 SGE_MAX_WR_LEN;
2306 temp = reqctx->imm ? roundup(assoclen + IV + req->cryptlen, 16)
2307 : (sgl_len(reqctx->src_nents + reqctx->aad_nents
2308 + MIN_GCM_SG) * 8);
2309 transhdr_len += temp;
2310 transhdr_len = roundup(transhdr_len, 16);
2311
2312 if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE,
2313 transhdr_len, reqctx->op)) {
2314 atomic_inc(&adap->chcr_stats.fallback);
2315 chcr_aead_common_exit(req);
2316 return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
2317 }
2318 skb = alloc_skb(SGE_MAX_WR_LEN, flags);
2319 if (!skb) {
2320 error = -ENOMEM;
2321 goto err;
2322 }
2323
2324 chcr_req = __skb_put_zero(skb, transhdr_len);
2325
2326 temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize;
2327
2328 	/*
2329 	 * Input order is AAD, IV and payload; the IV is counted as part of
2330 	 * the authentication data, and the remaining fields are filled in
2331 	 * according to the hardware specification.
2332 	 */
2333 chcr_req->sec_cpl.op_ivinsrtofst =
2334 FILL_SEC_CPL_OP_IVINSR(a_ctx(tfm)->dev->rx_channel_id, 2,
2335 assoclen + 1);
2336 chcr_req->sec_cpl.pldlen = htonl(assoclen + IV + req->cryptlen);
2337 chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
2338 assoclen ? 1 : 0, assoclen,
2339 assoclen + IV + 1,
2340 (temp & 0x1F0) >> 4);
2341 chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
2342 temp & 0xF,
2343 null ? 0 : assoclen + IV + 1,
2344 temp, temp);
2345 if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL ||
2346 subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA)
2347 temp = CHCR_SCMD_CIPHER_MODE_AES_CTR;
2348 else
2349 temp = CHCR_SCMD_CIPHER_MODE_AES_CBC;
2350 chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op,
2351 (reqctx->op == CHCR_ENCRYPT_OP) ? 1 : 0,
2352 temp,
2353 actx->auth_mode, aeadctx->hmac_ctrl,
2354 IV >> 1);
2355 chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
2356 0, 0, dst_size);
2357
2358 chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
2359 if (reqctx->op == CHCR_ENCRYPT_OP ||
2360 subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
2361 subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL)
2362 memcpy(chcr_req->key_ctx.key, aeadctx->key,
2363 aeadctx->enckey_len);
2364 else
2365 memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
2366 aeadctx->enckey_len);
2367
2368 memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
2369 actx->h_iopad, kctx_len - roundup(aeadctx->enckey_len, 16));
2370 if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
2371 subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
2372 memcpy(reqctx->iv, aeadctx->nonce, CTR_RFC3686_NONCE_SIZE);
2373 memcpy(reqctx->iv + CTR_RFC3686_NONCE_SIZE, req->iv,
2374 CTR_RFC3686_IV_SIZE);
2375 *(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
2376 CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
2377 } else {
2378 memcpy(reqctx->iv, req->iv, IV);
2379 }
2380 phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
2381 ulptx = (struct ulptx_sgl *)((u8 *)(phys_cpl + 1) + dst_size);
2382 chcr_add_aead_dst_ent(req, phys_cpl, assoclen, qid);
2383 chcr_add_aead_src_ent(req, ulptx, assoclen);
2384 atomic_inc(&adap->chcr_stats.cipher_rqst);
2385 temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size +
2386 kctx_len + (reqctx->imm ? (assoclen + IV + req->cryptlen) : 0);
2387 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
2388 transhdr_len, temp, 0);
2389 reqctx->skb = skb;
2390
2391 return skb;
2392err:
2393 chcr_aead_common_exit(req);
2394
2395 return ERR_PTR(error);
2396}
2397
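/*
 * DMA-map the IV (together with the CCM B0 block when present) and the
 * source and destination scatterlists of an AEAD request.
 */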
2398int chcr_aead_dma_map(struct device *dev,
2399 struct aead_request *req,
2400 unsigned short op_type)
2401{
2402 int error;
2403 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2404 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2405 unsigned int authsize = crypto_aead_authsize(tfm);
2406 int dst_size;
2407
2408 dst_size = req->assoclen + req->cryptlen + (op_type ?
2409 -authsize : authsize);
2410 if (!req->cryptlen || !dst_size)
2411 return 0;
2412 reqctx->iv_dma = dma_map_single(dev, reqctx->iv, (IV + reqctx->b0_len),
2413 DMA_BIDIRECTIONAL);
2414 if (dma_mapping_error(dev, reqctx->iv_dma))
2415 return -ENOMEM;
2416 if (reqctx->b0_len)
2417 reqctx->b0_dma = reqctx->iv_dma + IV;
2418 else
2419 reqctx->b0_dma = 0;
2420 if (req->src == req->dst) {
2421 error = dma_map_sg(dev, req->src, sg_nents(req->src),
2422 DMA_BIDIRECTIONAL);
2423 if (!error)
2424 goto err;
2425 } else {
2426 error = dma_map_sg(dev, req->src, sg_nents(req->src),
2427 DMA_TO_DEVICE);
2428 if (!error)
2429 goto err;
2430 error = dma_map_sg(dev, req->dst, sg_nents(req->dst),
2431 DMA_FROM_DEVICE);
2432 if (!error) {
2433 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2434 DMA_TO_DEVICE);
2435 goto err;
2436 }
2437 }
2438
2439 return 0;
2440err:
2441 	dma_unmap_single(dev, reqctx->iv_dma, IV + reqctx->b0_len,
			 DMA_BIDIRECTIONAL);
2442 return -ENOMEM;
2443}
2444
2445void chcr_aead_dma_unmap(struct device *dev,
2446 struct aead_request *req,
2447 unsigned short op_type)
2448{
2449 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2450 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2451 unsigned int authsize = crypto_aead_authsize(tfm);
2452 int dst_size;
2453
2454 dst_size = req->assoclen + req->cryptlen + (op_type ?
2455 -authsize : authsize);
2456 if (!req->cryptlen || !dst_size)
2457 return;
2458
2459 dma_unmap_single(dev, reqctx->iv_dma, (IV + reqctx->b0_len),
2460 DMA_BIDIRECTIONAL);
2461 if (req->src == req->dst) {
2462 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2463 DMA_BIDIRECTIONAL);
2464 } else {
2465 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2466 DMA_TO_DEVICE);
2467 dma_unmap_sg(dev, req->dst, sg_nents(req->dst),
2468 DMA_FROM_DEVICE);
2469 }
2470}
2471
2472void chcr_add_aead_src_ent(struct aead_request *req,
2473 struct ulptx_sgl *ulptx,
2474 unsigned int assoclen)
2475{
2476 struct ulptx_walk ulp_walk;
2477 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2478
2479 if (reqctx->imm) {
2480 u8 *buf = (u8 *)ulptx;
2481
2482 if (reqctx->b0_len) {
2483 memcpy(buf, reqctx->scratch_pad, reqctx->b0_len);
2484 buf += reqctx->b0_len;
2485 }
2486 sg_pcopy_to_buffer(req->src, sg_nents(req->src),
2487 buf, assoclen, 0);
2488 buf += assoclen;
2489 memcpy(buf, reqctx->iv, IV);
2490 buf += IV;
2491 sg_pcopy_to_buffer(req->src, sg_nents(req->src),
2492 buf, req->cryptlen, req->assoclen);
2493 } else {
2494 ulptx_walk_init(&ulp_walk, ulptx);
2495 if (reqctx->b0_len)
2496 ulptx_walk_add_page(&ulp_walk, reqctx->b0_len,
2497 &reqctx->b0_dma);
2498 ulptx_walk_add_sg(&ulp_walk, req->src, assoclen, 0);
2499 ulptx_walk_add_page(&ulp_walk, IV, &reqctx->iv_dma);
2500 ulptx_walk_add_sg(&ulp_walk, req->src, req->cryptlen,
2501 req->assoclen);
2502 ulptx_walk_end(&ulp_walk);
2503 }
2504}
2505
2506void chcr_add_aead_dst_ent(struct aead_request *req,
2507 struct cpl_rx_phys_dsgl *phys_cpl,
2508 unsigned int assoclen,
2509 unsigned short qid)
2510{
2511 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2512 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2513 struct dsgl_walk dsgl_walk;
2514 unsigned int authsize = crypto_aead_authsize(tfm);
2515 struct chcr_context *ctx = a_ctx(tfm);
2516 u32 temp;
2517
2518 dsgl_walk_init(&dsgl_walk, phys_cpl);
2519 if (reqctx->b0_len)
2520 dsgl_walk_add_page(&dsgl_walk, reqctx->b0_len, &reqctx->b0_dma);
2521 dsgl_walk_add_sg(&dsgl_walk, req->dst, assoclen, 0);
2522 dsgl_walk_add_page(&dsgl_walk, IV, &reqctx->iv_dma);
2523 temp = req->cryptlen + (reqctx->op ? -authsize : authsize);
2524 dsgl_walk_add_sg(&dsgl_walk, req->dst, temp, req->assoclen);
2525 dsgl_walk_end(&dsgl_walk, qid, ctx->pci_chan_id);
2526}
2527
2528void chcr_add_cipher_src_ent(struct ablkcipher_request *req,
2529 void *ulptx,
2530 struct cipher_wr_param *wrparam)
2531{
2532 struct ulptx_walk ulp_walk;
2533 struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
2534 u8 *buf = ulptx;
2535
2536 memcpy(buf, reqctx->iv, IV);
2537 buf += IV;
2538 if (reqctx->imm) {
2539 sg_pcopy_to_buffer(req->src, sg_nents(req->src),
2540 buf, wrparam->bytes, reqctx->processed);
2541 } else {
2542 ulptx_walk_init(&ulp_walk, (struct ulptx_sgl *)buf);
2543 ulptx_walk_add_sg(&ulp_walk, reqctx->srcsg, wrparam->bytes,
2544 reqctx->src_ofst);
2545 reqctx->srcsg = ulp_walk.last_sg;
2546 reqctx->src_ofst = ulp_walk.last_sg_len;
2547 ulptx_walk_end(&ulp_walk);
2548 }
2549}
2550
2551void chcr_add_cipher_dst_ent(struct ablkcipher_request *req,
2552 struct cpl_rx_phys_dsgl *phys_cpl,
2553 struct cipher_wr_param *wrparam,
2554 unsigned short qid)
2555{
2556 struct chcr_blkcipher_req_ctx *reqctx = ablkcipher_request_ctx(req);
2557 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(wrparam->req);
2558 struct chcr_context *ctx = c_ctx(tfm);
2559 struct dsgl_walk dsgl_walk;
2560
2561 dsgl_walk_init(&dsgl_walk, phys_cpl);
2562 dsgl_walk_add_sg(&dsgl_walk, reqctx->dstsg, wrparam->bytes,
2563 reqctx->dst_ofst);
2564 reqctx->dstsg = dsgl_walk.last_sg;
2565 reqctx->dst_ofst = dsgl_walk.last_sg_len;
2566
2567 dsgl_walk_end(&dsgl_walk, qid, ctx->pci_chan_id);
2568}
2569
2570void chcr_add_hash_src_ent(struct ahash_request *req,
2571 struct ulptx_sgl *ulptx,
2572 struct hash_wr_param *param)
2573{
2574 struct ulptx_walk ulp_walk;
2575 struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
2576
2577 if (reqctx->hctx_wr.imm) {
2578 u8 *buf = (u8 *)ulptx;
2579
2580 if (param->bfr_len) {
2581 memcpy(buf, reqctx->reqbfr, param->bfr_len);
2582 buf += param->bfr_len;
2583 }
2584
2585 sg_pcopy_to_buffer(reqctx->hctx_wr.srcsg,
2586 sg_nents(reqctx->hctx_wr.srcsg), buf,
2587 param->sg_len, 0);
2588 } else {
2589 ulptx_walk_init(&ulp_walk, ulptx);
2590 if (param->bfr_len)
2591 ulptx_walk_add_page(&ulp_walk, param->bfr_len,
2592 &reqctx->hctx_wr.dma_addr);
2593 ulptx_walk_add_sg(&ulp_walk, reqctx->hctx_wr.srcsg,
2594 param->sg_len, reqctx->hctx_wr.src_ofst);
2595 reqctx->hctx_wr.srcsg = ulp_walk.last_sg;
2596 reqctx->hctx_wr.src_ofst = ulp_walk.last_sg_len;
2597 ulptx_walk_end(&ulp_walk);
2598 }
2599}
2600
2601int chcr_hash_dma_map(struct device *dev,
2602 struct ahash_request *req)
2603{
2604 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
2605 int error = 0;
2606
2607 if (!req->nbytes)
2608 return 0;
2609 error = dma_map_sg(dev, req->src, sg_nents(req->src),
2610 DMA_TO_DEVICE);
2611 if (!error)
2612 return -ENOMEM;
2613 req_ctx->hctx_wr.is_sg_map = 1;
2614 return 0;
2615}
2616
2617void chcr_hash_dma_unmap(struct device *dev,
2618 struct ahash_request *req)
2619{
2620 struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
2621
2622 if (!req->nbytes)
2623 return;
2624
2625 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2626 DMA_TO_DEVICE);
2627 req_ctx->hctx_wr.is_sg_map = 0;
2629}
2630
2631int chcr_cipher_dma_map(struct device *dev,
2632 struct ablkcipher_request *req)
2633{
2634 int error;
2635
2636 if (req->src == req->dst) {
2637 error = dma_map_sg(dev, req->src, sg_nents(req->src),
2638 DMA_BIDIRECTIONAL);
2639 if (!error)
2640 goto err;
2641 } else {
2642 error = dma_map_sg(dev, req->src, sg_nents(req->src),
2643 DMA_TO_DEVICE);
2644 if (!error)
2645 goto err;
2646 error = dma_map_sg(dev, req->dst, sg_nents(req->dst),
2647 DMA_FROM_DEVICE);
2648 if (!error) {
2649 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2650 DMA_TO_DEVICE);
2651 goto err;
2652 }
2653 }
2654
2655 return 0;
2656err:
2657 return -ENOMEM;
2658}
2659
2660void chcr_cipher_dma_unmap(struct device *dev,
2661 struct ablkcipher_request *req)
2662{
2663 if (req->src == req->dst) {
2664 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2665 DMA_BIDIRECTIONAL);
2666 } else {
2667 dma_unmap_sg(dev, req->src, sg_nents(req->src),
2668 DMA_TO_DEVICE);
2669 dma_unmap_sg(dev, req->dst, sg_nents(req->dst),
2670 DMA_FROM_DEVICE);
2671 }
2672}
2673
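/*
 * set_msg_len - encode the CCM message length big-endian into the last
 * csize bytes of the B0 block, failing with -EOVERFLOW if the length
 * does not fit.
 */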
2674static int set_msg_len(u8 *block, unsigned int msglen, int csize)
2675{
2676 __be32 data;
2677
2678 memset(block, 0, csize);
2679 block += csize;
2680
2681 if (csize >= 4)
2682 csize = 4;
2683 else if (msglen > (unsigned int)(1 << (8 * csize)))
2684 return -EOVERFLOW;
2685
2686 data = cpu_to_be32(msglen);
2687 memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
2688
2689 return 0;
2690}
2691
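/*
 * generate_b0 - build the CCM B0 block in the scratch pad: the flags
 * byte and nonce from the IV, the tag-length and Adata flag bits, and
 * the message length in the trailing L bytes.
 */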
2692 static int generate_b0(struct aead_request *req,
2693 struct chcr_aead_ctx *aeadctx,
2694 unsigned short op_type)
2695{
2696 unsigned int l, lp, m;
2697 int rc;
2698 struct crypto_aead *aead = crypto_aead_reqtfm(req);
2699 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2700 u8 *b0 = reqctx->scratch_pad;
2701
2702 m = crypto_aead_authsize(aead);
2703
2704 memcpy(b0, reqctx->iv, 16);
2705
2706 lp = b0[0];
2707 l = lp + 1;
2708
2709 	/* Encode the tag length m into flags bits 3-5 as (m - 2) / 2. */
2710 	*b0 |= (8 * ((m - 2) / 2));
2711 
2712 	/* Set the Adata bit when associated data is present. */
2713 if (req->assoclen)
2714 *b0 |= 64;
2715 	rc = set_msg_len(b0 + 16 - l,
2716 			 (op_type == CHCR_DECRYPT_OP) ?
2717 			 req->cryptlen - m : req->cryptlen, l);
	return rc;
2718 }
2719
2720static inline int crypto_ccm_check_iv(const u8 *iv)
2721{
2722 	/* 2 <= L <= 8, so 1 <= L' <= 7. */
2723 if (iv[0] < 1 || iv[0] > 7)
2724 return -EINVAL;
2725
2726 return 0;
2727}
2728
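/*
 * ccm_format_packet - lay out the CCM nonce (RFC4309 uses a 3-byte salt
 * plus an 8-byte IV), the 16-bit AAD length field and the B0 block for
 * the hardware.
 */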
2729static int ccm_format_packet(struct aead_request *req,
2730 struct chcr_aead_ctx *aeadctx,
2731 unsigned int sub_type,
2732 unsigned short op_type,
2733 unsigned int assoclen)
2734{
2735 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2736 int rc = 0;
2737
2738 if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
2739 reqctx->iv[0] = 3;
2740 memcpy(reqctx->iv + 1, &aeadctx->salt[0], 3);
2741 memcpy(reqctx->iv + 4, req->iv, 8);
2742 memset(reqctx->iv + 12, 0, 4);
2743 } else {
2744 memcpy(reqctx->iv, req->iv, 16);
2745 }
2746 if (assoclen)
2747 *((unsigned short *)(reqctx->scratch_pad + 16)) =
2748 htons(assoclen);
2749
2750 	rc = generate_b0(req, aeadctx, op_type);
2751
2752 memset(reqctx->iv + 15 - reqctx->iv[0], 0, reqctx->iv[0] + 1);
2753 return rc;
2754}
2755
2756static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
2757 unsigned int dst_size,
2758 struct aead_request *req,
2759 unsigned short op_type)
2760{
2761 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2762 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2763 unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
2764 unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
2765 unsigned int c_id = a_ctx(tfm)->dev->rx_channel_id;
2766 unsigned int ccm_xtra;
2767 unsigned char tag_offset = 0, auth_offset = 0;
2768 unsigned int assoclen;
2769
2770 if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
2771 assoclen = req->assoclen - 8;
2772 else
2773 assoclen = req->assoclen;
2774 ccm_xtra = CCM_B0_SIZE +
2775 ((assoclen) ? CCM_AAD_FIELD_SIZE : 0);
2776
2777 auth_offset = req->cryptlen ?
2778 (assoclen + IV + 1 + ccm_xtra) : 0;
2779 if (op_type == CHCR_DECRYPT_OP) {
2780 if (crypto_aead_authsize(tfm) != req->cryptlen)
2781 tag_offset = crypto_aead_authsize(tfm);
2782 else
2783 auth_offset = 0;
2784 }
2785
2786
2787 sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(c_id,
2788 2, assoclen + 1 + ccm_xtra);
2789 sec_cpl->pldlen =
2790 htonl(assoclen + IV + req->cryptlen + ccm_xtra);
2791
2792 sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
2793 1, assoclen + ccm_xtra, assoclen
2794 + IV + 1 + ccm_xtra, 0);
2795
2796 sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
2797 auth_offset, tag_offset,
2798 (op_type == CHCR_ENCRYPT_OP) ? 0 :
2799 crypto_aead_authsize(tfm));
2800 sec_cpl->seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
2801 (op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
2802 cipher_mode, mac_mode,
2803 aeadctx->hmac_ctrl, IV >> 1);
2804
2805 sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
2806 0, dst_size);
2807}
2808
2809static int aead_ccm_validate_input(unsigned short op_type,
2810 struct aead_request *req,
2811 struct chcr_aead_ctx *aeadctx,
2812 unsigned int sub_type)
2813{
2814 if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
2815 if (crypto_ccm_check_iv(req->iv)) {
2816 pr_err("CCM: IV check fails\n");
2817 return -EINVAL;
2818 }
2819 } else {
2820 if (req->assoclen != 16 && req->assoclen != 20) {
2821 pr_err("RFC4309: Invalid AAD length %d\n",
2822 req->assoclen);
2823 return -EINVAL;
2824 }
2825 }
2826 return 0;
2827}
2828
2829static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
2830 unsigned short qid,
2831 int size)
2832{
2833 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2834 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2835 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2836 struct sk_buff *skb = NULL;
2837 struct chcr_wr *chcr_req;
2838 struct cpl_rx_phys_dsgl *phys_cpl;
2839 struct ulptx_sgl *ulptx;
2840 unsigned int transhdr_len;
2841 unsigned int dst_size = 0, kctx_len, dnents, temp;
2842 unsigned int sub_type, assoclen = req->assoclen;
2843 unsigned int authsize = crypto_aead_authsize(tfm);
2844 int error = -EINVAL;
2845 gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
2846 GFP_ATOMIC;
2847 struct adapter *adap = padap(a_ctx(tfm)->dev);
2848
2849 sub_type = get_aead_subtype(tfm);
2850 if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
2851 assoclen -= 8;
2852 reqctx->b0_len = CCM_B0_SIZE + (assoclen ? CCM_AAD_FIELD_SIZE : 0);
2853 error = chcr_aead_common_init(req);
2854 if (error)
2855 return ERR_PTR(error);
2856
2857 error = aead_ccm_validate_input(reqctx->op, req, aeadctx, sub_type);
2858 if (error)
2859 goto err;
2860 dnents = sg_nents_xlen(req->dst, assoclen, CHCR_DST_SG_SIZE, 0);
2861 dnents += sg_nents_xlen(req->dst, req->cryptlen
2862 + (reqctx->op ? -authsize : authsize),
2863 CHCR_DST_SG_SIZE, req->assoclen);
2864 dnents += MIN_CCM_SG;
2865 dst_size = get_space_for_phys_dsgl(dnents);
2866 kctx_len = roundup(aeadctx->enckey_len, 16) * 2;
2867 transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
2868 reqctx->imm = (transhdr_len + assoclen + IV + req->cryptlen +
2869 reqctx->b0_len) <= SGE_MAX_WR_LEN;
2870 temp = reqctx->imm ? roundup(assoclen + IV + req->cryptlen +
2871 reqctx->b0_len, 16) :
2872 (sgl_len(reqctx->src_nents + reqctx->aad_nents +
2873 MIN_CCM_SG) * 8);
2874 transhdr_len += temp;
2875 transhdr_len = roundup(transhdr_len, 16);
2876
2877 if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE -
2878 reqctx->b0_len, transhdr_len, reqctx->op)) {
2879 atomic_inc(&adap->chcr_stats.fallback);
2880 chcr_aead_common_exit(req);
2881 return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
2882 }
2883 skb = alloc_skb(SGE_MAX_WR_LEN, flags);
2884
2885 if (!skb) {
2886 error = -ENOMEM;
2887 goto err;
2888 }
2889
2890 	chcr_req = __skb_put_zero(skb, transhdr_len);
2891
2892 fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, reqctx->op);
2893
2894 chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
2895 memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
2896 memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
2897 aeadctx->key, aeadctx->enckey_len);
2898
2899 phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
2900 ulptx = (struct ulptx_sgl *)((u8 *)(phys_cpl + 1) + dst_size);
2901 error = ccm_format_packet(req, aeadctx, sub_type, reqctx->op, assoclen);
2902 if (error)
2903 goto dstmap_fail;
2904 chcr_add_aead_dst_ent(req, phys_cpl, assoclen, qid);
2905 chcr_add_aead_src_ent(req, ulptx, assoclen);
2906
2907 atomic_inc(&adap->chcr_stats.aead_rqst);
2908 temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size +
2909 kctx_len + (reqctx->imm ? (assoclen + IV + req->cryptlen +
2910 reqctx->b0_len) : 0);
2911 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0,
2912 transhdr_len, temp, 0);
2913 reqctx->skb = skb;
2914
2915 return skb;
2916dstmap_fail:
2917 kfree_skb(skb);
2918err:
2919 chcr_aead_common_exit(req);
2920 return ERR_PTR(error);
2921}
2922
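/*
 * create_gcm_wr - build the work request for GCM and RFC4106. The GHASH
 * key H computed at setkey time is appended to the cipher key in the
 * key context.
 */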
2923static struct sk_buff *create_gcm_wr(struct aead_request *req,
2924 unsigned short qid,
2925 int size)
2926{
2927 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2928 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
2929 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
2930 struct sk_buff *skb = NULL;
2931 struct chcr_wr *chcr_req;
2932 struct cpl_rx_phys_dsgl *phys_cpl;
2933 struct ulptx_sgl *ulptx;
2934 unsigned int transhdr_len, dnents = 0;
2935 unsigned int dst_size = 0, temp = 0, kctx_len, assoclen = req->assoclen;
2936 unsigned int authsize = crypto_aead_authsize(tfm);
2937 int error = -EINVAL;
2938 gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
2939 GFP_ATOMIC;
2940 struct adapter *adap = padap(a_ctx(tfm)->dev);
2941
2942 if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
2943 assoclen = req->assoclen - 8;
2944
2945 reqctx->b0_len = 0;
2946 error = chcr_aead_common_init(req);
2947 if (error)
2948 return ERR_PTR(error);
2949 dnents = sg_nents_xlen(req->dst, assoclen, CHCR_DST_SG_SIZE, 0);
2950 dnents += sg_nents_xlen(req->dst, req->cryptlen +
2951 (reqctx->op ? -authsize : authsize),
2952 CHCR_DST_SG_SIZE, req->assoclen);
2953 dnents += MIN_GCM_SG;
2954 dst_size = get_space_for_phys_dsgl(dnents);
2955 kctx_len = roundup(aeadctx->enckey_len, 16) + AEAD_H_SIZE;
2956 transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
2957 reqctx->imm = (transhdr_len + assoclen + IV + req->cryptlen) <=
2958 SGE_MAX_WR_LEN;
2959 temp = reqctx->imm ? roundup(assoclen + IV + req->cryptlen, 16) :
2960 (sgl_len(reqctx->src_nents +
2961 reqctx->aad_nents + MIN_GCM_SG) * 8);
2962 transhdr_len += temp;
2963 transhdr_len = roundup(transhdr_len, 16);
2964 if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE,
2965 transhdr_len, reqctx->op)) {
2967 atomic_inc(&adap->chcr_stats.fallback);
2968 chcr_aead_common_exit(req);
2969 return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
2970 }
2971 skb = alloc_skb(SGE_MAX_WR_LEN, flags);
2972 if (!skb) {
2973 error = -ENOMEM;
2974 goto err;
2975 }
2976
2977 chcr_req = __skb_put_zero(skb, transhdr_len);
2978 
2979 	/* Offset of the tag from the end of the payload. */
2980 temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize;
2981 chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
2982 a_ctx(tfm)->dev->rx_channel_id, 2,
2983 (assoclen + 1));
2984 chcr_req->sec_cpl.pldlen =
2985 htonl(assoclen + IV + req->cryptlen);
2986 chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
2987 assoclen ? 1 : 0, assoclen,
2988 assoclen + IV + 1, 0);
2989 chcr_req->sec_cpl.cipherstop_lo_authinsert =
2990 FILL_SEC_CPL_AUTHINSERT(0, assoclen + IV + 1,
2991 temp, temp);
2992 chcr_req->sec_cpl.seqno_numivs =
2993 FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, (reqctx->op ==
2994 CHCR_ENCRYPT_OP) ? 1 : 0,
2995 CHCR_SCMD_CIPHER_MODE_AES_GCM,
2996 CHCR_SCMD_AUTH_MODE_GHASH,
2997 aeadctx->hmac_ctrl, IV >> 1);
2998 chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
2999 0, 0, dst_size);
3000 chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
3001 memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
3002 memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
3003 GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);
3004
3005 	/* Prepare the 16-byte IV: SALT | IV | 0x00000001 for RFC4106, else
3006 	 * the caller's 12-byte IV followed by the 0x01 counter word. */
3007 if (get_aead_subtype(tfm) ==
3008 CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
3009 memcpy(reqctx->iv, aeadctx->salt, 4);
3010 memcpy(reqctx->iv + 4, req->iv, GCM_RFC4106_IV_SIZE);
3011 } else {
3012 memcpy(reqctx->iv, req->iv, GCM_AES_IV_SIZE);
3013 }
3014 *((unsigned int *)(reqctx->iv + 12)) = htonl(0x01);
3015
3016 phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
3017 ulptx = (struct ulptx_sgl *)((u8 *)(phys_cpl + 1) + dst_size);
3018
3019 chcr_add_aead_dst_ent(req, phys_cpl, assoclen, qid);
3020 chcr_add_aead_src_ent(req, ulptx, assoclen);
3021 atomic_inc(&adap->chcr_stats.aead_rqst);
3022 temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size +
3023 kctx_len + (reqctx->imm ? (assoclen + IV + req->cryptlen) : 0);
3024 create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
3025 transhdr_len, temp, reqctx->verify);
3026 reqctx->skb = skb;
3027 return skb;
3028
3029err:
3030 chcr_aead_common_exit(req);
3031 return ERR_PTR(error);
3032}
3033
3036static int chcr_aead_cra_init(struct crypto_aead *tfm)
3037{
3038 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3039 struct aead_alg *alg = crypto_aead_alg(tfm);
3040
3041 aeadctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0,
3042 CRYPTO_ALG_NEED_FALLBACK |
3043 CRYPTO_ALG_ASYNC);
3044 if (IS_ERR(aeadctx->sw_cipher))
3045 return PTR_ERR(aeadctx->sw_cipher);
3046 crypto_aead_set_reqsize(tfm, max(sizeof(struct chcr_aead_reqctx),
3047 sizeof(struct aead_request) +
3048 crypto_aead_reqsize(aeadctx->sw_cipher)));
3049 return chcr_device_init(a_ctx(tfm));
3050}
3051
3052static void chcr_aead_cra_exit(struct crypto_aead *tfm)
3053{
3054 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3055
3056 crypto_free_aead(aeadctx->sw_cipher);
3057}
3058
3059static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
3060 unsigned int authsize)
3061{
3062 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3063
3064 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
3065 aeadctx->mayverify = VERIFY_HW;
3066 return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
3067}

3068 static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
3069 unsigned int authsize)
3070{
3071 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3072 u32 maxauth = crypto_aead_maxauthsize(tfm);
3073
3074 	/* The hardware verifies only this fixed set of truncated ICV
3075 	 * lengths; any other size is accepted but verified in software
3076 	 * against the untruncated MAC.
3077 	 */
3078 if (authsize == ICV_4) {
3079 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
3080 aeadctx->mayverify = VERIFY_HW;
3081 } else if (authsize == ICV_6) {
3082 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
3083 aeadctx->mayverify = VERIFY_HW;
3084 } else if (authsize == ICV_10) {
3085 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
3086 aeadctx->mayverify = VERIFY_HW;
3087 } else if (authsize == ICV_12) {
3088 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
3089 aeadctx->mayverify = VERIFY_HW;
3090 } else if (authsize == ICV_14) {
3091 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
3092 aeadctx->mayverify = VERIFY_HW;
3093 } else if (authsize == (maxauth >> 1)) {
3094 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
3095 aeadctx->mayverify = VERIFY_HW;
3096 } else if (authsize == maxauth) {
3097 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
3098 aeadctx->mayverify = VERIFY_HW;
3099 } else {
3100 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
3101 aeadctx->mayverify = VERIFY_SW;
3102 }
3103 return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
3104}
3105
3107static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
3108{
3109 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3110
3111 switch (authsize) {
3112 case ICV_4:
3113 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
3114 aeadctx->mayverify = VERIFY_HW;
3115 break;
3116 case ICV_8:
3117 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
3118 aeadctx->mayverify = VERIFY_HW;
3119 break;
3120 case ICV_12:
3121 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
3122 aeadctx->mayverify = VERIFY_HW;
3123 break;
3124 case ICV_14:
3125 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
3126 aeadctx->mayverify = VERIFY_HW;
3127 break;
3128 case ICV_16:
3129 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
3130 aeadctx->mayverify = VERIFY_HW;
3131 break;
3132 case ICV_13:
3133 case ICV_15:
3134 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
3135 aeadctx->mayverify = VERIFY_SW;
3136 break;
3137 default:
3138
3139 crypto_tfm_set_flags((struct crypto_tfm *) tfm,
3140 CRYPTO_TFM_RES_BAD_KEY_LEN);
3141 return -EINVAL;
3142 }
3143 return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
3144}
3145
3146static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
3147 unsigned int authsize)
3148{
3149 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3150
3151 switch (authsize) {
3152 case ICV_8:
3153 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
3154 aeadctx->mayverify = VERIFY_HW;
3155 break;
3156 case ICV_12:
3157 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
3158 aeadctx->mayverify = VERIFY_HW;
3159 break;
3160 case ICV_16:
3161 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
3162 aeadctx->mayverify = VERIFY_HW;
3163 break;
3164 default:
3165 crypto_tfm_set_flags((struct crypto_tfm *)tfm,
3166 CRYPTO_TFM_RES_BAD_KEY_LEN);
3167 return -EINVAL;
3168 }
3169 return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
3170}
3171
3172static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
3173 unsigned int authsize)
3174{
3175 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3176
3177 switch (authsize) {
3178 case ICV_4:
3179 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
3180 aeadctx->mayverify = VERIFY_HW;
3181 break;
3182 case ICV_6:
3183 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
3184 aeadctx->mayverify = VERIFY_HW;
3185 break;
3186 case ICV_8:
3187 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
3188 aeadctx->mayverify = VERIFY_HW;
3189 break;
3190 case ICV_10:
3191 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
3192 aeadctx->mayverify = VERIFY_HW;
3193 break;
3194 case ICV_12:
3195 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
3196 aeadctx->mayverify = VERIFY_HW;
3197 break;
3198 case ICV_14:
3199 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
3200 aeadctx->mayverify = VERIFY_HW;
3201 break;
3202 case ICV_16:
3203 aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
3204 aeadctx->mayverify = VERIFY_HW;
3205 break;
3206 default:
3207 crypto_tfm_set_flags((struct crypto_tfm *)tfm,
3208 CRYPTO_TFM_RES_BAD_KEY_LEN);
3209 return -EINVAL;
3210 }
3211 return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
3212}
3213
3214static int chcr_ccm_common_setkey(struct crypto_aead *aead,
3215 const u8 *key,
3216 unsigned int keylen)
3217{
3218 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
3219 unsigned char ck_size, mk_size;
3220 int key_ctx_size = 0;
3221
3222 key_ctx_size = sizeof(struct _key_ctx) + roundup(keylen, 16) * 2;
3223 if (keylen == AES_KEYSIZE_128) {
3224 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
3225 mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_128;
3226 } else if (keylen == AES_KEYSIZE_192) {
3227 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
3228 mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
3229 } else if (keylen == AES_KEYSIZE_256) {
3230 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
3231 mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
3232 } else {
3233 crypto_tfm_set_flags((struct crypto_tfm *)aead,
3234 CRYPTO_TFM_RES_BAD_KEY_LEN);
3235 aeadctx->enckey_len = 0;
3236 return -EINVAL;
3237 }
3238 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
3239 key_ctx_size >> 4);
3240 memcpy(aeadctx->key, key, keylen);
3241 aeadctx->enckey_len = keylen;
3242
3243 return 0;
3244}
3245
3246static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
3247 const u8 *key,
3248 unsigned int keylen)
3249{
3250 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
3251 int error;
3252
3253 crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
3254 crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
3255 CRYPTO_TFM_REQ_MASK);
3256 error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
3257 crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
3258 crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
3259 CRYPTO_TFM_RES_MASK);
3260 if (error)
3261 return error;
3262 return chcr_ccm_common_setkey(aead, key, keylen);
3263}
3264
3265static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
3266 unsigned int keylen)
3267{
3268 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
3269 int error;
3270
3271 if (keylen < 3) {
3272 crypto_tfm_set_flags((struct crypto_tfm *)aead,
3273 CRYPTO_TFM_RES_BAD_KEY_LEN);
3274 aeadctx->enckey_len = 0;
3275 return -EINVAL;
3276 }
3277 crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
3278 crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
3279 CRYPTO_TFM_REQ_MASK);
3280 error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
3281 crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
3282 crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
3283 CRYPTO_TFM_RES_MASK);
3284 if (error)
3285 return error;
3286 keylen -= 3;
3287 memcpy(aeadctx->salt, key + keylen, 3);
3288 return chcr_ccm_common_setkey(aead, key, keylen);
3289}
3290
3291static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
3292 unsigned int keylen)
3293{
3294 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
3295 struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
3296 struct crypto_cipher *cipher;
3297 unsigned int ck_size;
3298 int ret = 0, key_ctx_size = 0;
3299
3300 aeadctx->enckey_len = 0;
3301 crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
3302 crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead)
3303 & CRYPTO_TFM_REQ_MASK);
3304 ret = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
3305 crypto_aead_clear_flags(aead, CRYPTO_TFM_RES_MASK);
3306 crypto_aead_set_flags(aead, crypto_aead_get_flags(aeadctx->sw_cipher) &
3307 CRYPTO_TFM_RES_MASK);
3308 if (ret)
3309 goto out;
3310
3311 if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
3312 keylen > 3) {
3313 keylen -= 4;
3314 memcpy(aeadctx->salt, key + keylen, 4);
3315 }
3316 if (keylen == AES_KEYSIZE_128) {
3317 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
3318 } else if (keylen == AES_KEYSIZE_192) {
3319 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
3320 } else if (keylen == AES_KEYSIZE_256) {
3321 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
3322 } else {
3323 crypto_tfm_set_flags((struct crypto_tfm *)aead,
3324 CRYPTO_TFM_RES_BAD_KEY_LEN);
3325 pr_err("GCM: Invalid key length %d\n", keylen);
3326 ret = -EINVAL;
3327 goto out;
3328 }
3329
3330 memcpy(aeadctx->key, key, keylen);
3331 aeadctx->enckey_len = keylen;
3332 key_ctx_size = sizeof(struct _key_ctx) + roundup(keylen, 16) +
3333 AEAD_H_SIZE;
3334 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
3335 CHCR_KEYCTX_MAC_KEY_SIZE_128,
3336 0, 0,
3337 key_ctx_size >> 4);
3338 	/* Compute the GHASH key H = E(K, 0^128) with a software AES cipher;
3339 	 * it is stored after the raw key in the key context.
3340 	 */
3341 cipher = crypto_alloc_cipher("aes-generic", 0, 0);
3342 if (IS_ERR(cipher)) {
3343 aeadctx->enckey_len = 0;
3344 ret = -ENOMEM;
3345 goto out;
3346 }
3347
3348 ret = crypto_cipher_setkey(cipher, key, keylen);
3349 if (ret) {
3350 aeadctx->enckey_len = 0;
3351 goto out1;
3352 }
3353 memset(gctx->ghash_h, 0, AEAD_H_SIZE);
3354 crypto_cipher_encrypt_one(cipher, gctx->ghash_h, gctx->ghash_h);
3355
3356out1:
3357 crypto_free_cipher(cipher);
3358out:
3359 return ret;
3360}
3361
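/*
 * chcr_authenc_setkey - split the authenc key into its cipher and MAC
 * parts, store the cipher key (plus its decrypt form for CBC) and
 * precompute the ipad/opad partial digests for the key context.
 */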
3362static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
3363 unsigned int keylen)
3364{
3365 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
3366 struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
3367
3368 struct crypto_authenc_keys keys;
3369 unsigned int bs, subtype;
3370 unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
3371 int err = 0, i, key_ctx_len = 0;
3372 unsigned char ck_size = 0;
3373 unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
3374 struct crypto_shash *base_hash = ERR_PTR(-EINVAL);
3375 struct algo_param param;
3376 int align;
3377 u8 *o_ptr = NULL;
3378
3379 crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
3380 crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
3381 & CRYPTO_TFM_REQ_MASK);
3382 err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
3383 crypto_aead_clear_flags(authenc, CRYPTO_TFM_RES_MASK);
3384 crypto_aead_set_flags(authenc, crypto_aead_get_flags(aeadctx->sw_cipher)
3385 & CRYPTO_TFM_RES_MASK);
3386 if (err)
3387 goto out;
3388
3389 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
3390 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
3391 goto out;
3392 }
3393
3394 	if (get_alg_config(&param, max_authsize)) {
3395 pr_err("chcr : Unsupported digest size\n");
3396 goto out;
3397 }
3398 subtype = get_aead_subtype(authenc);
3399 if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
3400 subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
3401 if (keys.enckeylen < CTR_RFC3686_NONCE_SIZE)
3402 goto out;
3403 memcpy(aeadctx->nonce, keys.enckey + (keys.enckeylen
3404 - CTR_RFC3686_NONCE_SIZE), CTR_RFC3686_NONCE_SIZE);
3405 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
3406 }
3407 if (keys.enckeylen == AES_KEYSIZE_128) {
3408 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
3409 } else if (keys.enckeylen == AES_KEYSIZE_192) {
3410 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
3411 } else if (keys.enckeylen == AES_KEYSIZE_256) {
3412 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
3413 } else {
3414 pr_err("chcr : Unsupported cipher key\n");
3415 goto out;
3416 }
3417 
3418 	/* Copy only the encryption key: the authentication key is consumed
3419 	 * below to derive the ipad/opad partial hashes and is not stored in
3420 	 * the key context itself.
3421 	 */
3422 memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
3423 aeadctx->enckey_len = keys.enckeylen;
3424 if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_SHA ||
3425 subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL) {
3426 		/* CBC decryption uses the reverse-round (decrypt) key. */
3427 get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
3428 aeadctx->enckey_len << 3);
3429 }
3430 base_hash = chcr_alloc_shash(max_authsize);
3431 if (IS_ERR(base_hash)) {
3432 pr_err("chcr : Base driver cannot be loaded\n");
3433 aeadctx->enckey_len = 0;
3434 memzero_explicit(&keys, sizeof(keys));
3435 return -EINVAL;
3436 }
3437 {
3438 SHASH_DESC_ON_STACK(shash, base_hash);
3439
3440 shash->tfm = base_hash;
3441 shash->flags = crypto_shash_get_flags(base_hash);
3442 bs = crypto_shash_blocksize(base_hash);
3443 align = KEYCTX_ALIGN_PAD(max_authsize);
3444 o_ptr = actx->h_iopad + param.result_size + align;
3445
3446 if (keys.authkeylen > bs) {
3447 err = crypto_shash_digest(shash, keys.authkey,
3448 keys.authkeylen,
3449 o_ptr);
3450 if (err) {
3451 pr_err("chcr : Base driver cannot be loaded\n");
3452 goto out;
3453 }
3454 keys.authkeylen = max_authsize;
3455 } else
3456 memcpy(o_ptr, keys.authkey, keys.authkeylen);
3457
3458 		/* Compute the ipad partial digest. */
3459 memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
3460 memcpy(pad, o_ptr, keys.authkeylen);
3461 for (i = 0; i < bs >> 2; i++)
3462 *((unsigned int *)pad + i) ^= IPAD_DATA;
3463
3464 if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
3465 max_authsize))
3466 goto out;
3467 
		/* Compute the opad partial digest. */
3468 memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
3469 memcpy(pad, o_ptr, keys.authkeylen);
3470 for (i = 0; i < bs >> 2; i++)
3471 *((unsigned int *)pad + i) ^= OPAD_DATA;
3472
3473 if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
3474 goto out;
3475
3476 		/* Convert the ipad/opad digests to the hardware byte order. */
3477 chcr_change_order(actx->h_iopad, param.result_size);
3478 chcr_change_order(o_ptr, param.result_size);
3479 key_ctx_len = sizeof(struct _key_ctx) +
3480 roundup(keys.enckeylen, 16) +
3481 (param.result_size + align) * 2;
3482 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
3483 0, 1, key_ctx_len >> 4);
3484 actx->auth_mode = param.auth_mode;
3485 chcr_free_shash(base_hash);
3486
3487 memzero_explicit(&keys, sizeof(keys));
3488 return 0;
3489 }
3490out:
3491 aeadctx->enckey_len = 0;
3492 memzero_explicit(&keys, sizeof(keys));
3493 if (!IS_ERR(base_hash))
3494 chcr_free_shash(base_hash);
3495 return -EINVAL;
3496}
3497
3498static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
3499 const u8 *key, unsigned int keylen)
3500{
3501 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
3502 struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
3503 struct crypto_authenc_keys keys;
3504 int err;
3505
3506 unsigned int subtype;
3507 int key_ctx_len = 0;
3508 unsigned char ck_size = 0;
3509
3510 crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
3511 crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
3512 & CRYPTO_TFM_REQ_MASK);
3513 err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
3514 crypto_aead_clear_flags(authenc, CRYPTO_TFM_RES_MASK);
3515 crypto_aead_set_flags(authenc, crypto_aead_get_flags(aeadctx->sw_cipher)
3516 & CRYPTO_TFM_RES_MASK);
3517 if (err)
3518 goto out;
3519
3520 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
3521 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
3522 goto out;
3523 }
3524 subtype = get_aead_subtype(authenc);
3525 if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
3526 subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
3527 if (keys.enckeylen < CTR_RFC3686_NONCE_SIZE)
3528 goto out;
3529 memcpy(aeadctx->nonce, keys.enckey + (keys.enckeylen
3530 - CTR_RFC3686_NONCE_SIZE), CTR_RFC3686_NONCE_SIZE);
3531 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
3532 }
3533 if (keys.enckeylen == AES_KEYSIZE_128) {
3534 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
3535 } else if (keys.enckeylen == AES_KEYSIZE_192) {
3536 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
3537 } else if (keys.enckeylen == AES_KEYSIZE_256) {
3538 ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
3539 } else {
3540 pr_err("chcr : Unsupported cipher key %d\n", keys.enckeylen);
3541 goto out;
3542 }
3543 memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
3544 aeadctx->enckey_len = keys.enckeylen;
3545 if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_SHA ||
3546 subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL) {
3547 get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
3548 aeadctx->enckey_len << 3);
3549 }
3550 key_ctx_len = sizeof(struct _key_ctx) + roundup(keys.enckeylen, 16);
3551
3552 aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
3553 0, key_ctx_len >> 4);
3554 actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
3555 memzero_explicit(&keys, sizeof(keys));
3556 return 0;
3557out:
3558 aeadctx->enckey_len = 0;
3559 memzero_explicit(&keys, sizeof(keys));
3560 return -EINVAL;
3561}
3562
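/*
 * chcr_aead_op - common submission path for AEAD requests: checks the
 * queue occupancy, builds the WR through the mode-specific constructor
 * and hands it to the hardware.
 */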
3563static int chcr_aead_op(struct aead_request *req,
3564 int size,
3565 create_wr_t create_wr_fn)
3566{
3567 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3568 struct uld_ctx *u_ctx;
3569 struct sk_buff *skb;
3570 int isfull = 0;
3571
3572 if (!a_ctx(tfm)->dev) {
3573 pr_err("chcr : %s : No crypto device.\n", __func__);
3574 return -ENXIO;
3575 }
3576 u_ctx = ULD_CTX(a_ctx(tfm));
3577 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
3578 a_ctx(tfm)->tx_qidx)) {
3579 isfull = 1;
3580 if (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
3581 return -ENOSPC;
3582 }
3583
3585 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[a_ctx(tfm)->rx_qidx], size);
3586
3587 if (IS_ERR(skb) || !skb)
3588 return PTR_ERR(skb);
3589
3590 skb->dev = u_ctx->lldi.ports[0];
3591 set_wr_txq(skb, CPL_PRIORITY_DATA, a_ctx(tfm)->tx_qidx);
3592 chcr_send_wr(skb);
3593 return isfull ? -EBUSY : -EINPROGRESS;
3594}
3595
3596static int chcr_aead_encrypt(struct aead_request *req)
3597{
3598 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3599 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
3600
3601 reqctx->verify = VERIFY_HW;
3602 reqctx->op = CHCR_ENCRYPT_OP;
3603
3604 switch (get_aead_subtype(tfm)) {
3605 case CRYPTO_ALG_SUB_TYPE_CTR_SHA:
3606 case CRYPTO_ALG_SUB_TYPE_CBC_SHA:
3607 case CRYPTO_ALG_SUB_TYPE_CBC_NULL:
3608 case CRYPTO_ALG_SUB_TYPE_CTR_NULL:
3609 return chcr_aead_op(req, 0, create_authenc_wr);
3610 case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
3611 case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
3612 return chcr_aead_op(req, 0, create_aead_ccm_wr);
3613 default:
3614 return chcr_aead_op(req, 0, create_gcm_wr);
3615 }
3616}
3617
3618static int chcr_aead_decrypt(struct aead_request *req)
3619{
3620 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3621 struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
3622 struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
3623 int size;
3624
3625 if (aeadctx->mayverify == VERIFY_SW) {
3626 size = crypto_aead_maxauthsize(tfm);
3627 reqctx->verify = VERIFY_SW;
3628 } else {
3629 size = 0;
3630 reqctx->verify = VERIFY_HW;
3631 }
3632 reqctx->op = CHCR_DECRYPT_OP;
3633 switch (get_aead_subtype(tfm)) {
3634 case CRYPTO_ALG_SUB_TYPE_CBC_SHA:
3635 case CRYPTO_ALG_SUB_TYPE_CTR_SHA:
3636 case CRYPTO_ALG_SUB_TYPE_CBC_NULL:
3637 case CRYPTO_ALG_SUB_TYPE_CTR_NULL:
3638 return chcr_aead_op(req, size, create_authenc_wr);
3639 case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
3640 case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
3641 return chcr_aead_op(req, size, create_aead_ccm_wr);
3642 default:
3643 return chcr_aead_op(req, size, create_gcm_wr);
3644 }
3645}
3646
3647static struct chcr_alg_template driver_algs[] = {
3648 	/* AES-CBC */
3649 {
3650 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_SUB_TYPE_CBC,
3651 .is_registered = 0,
3652 .alg.crypto = {
3653 .cra_name = "cbc(aes)",
3654 .cra_driver_name = "cbc-aes-chcr",
3655 .cra_blocksize = AES_BLOCK_SIZE,
3656 .cra_init = chcr_cra_init,
3657 .cra_exit = chcr_cra_exit,
3658 .cra_u.ablkcipher = {
3659 .min_keysize = AES_MIN_KEY_SIZE,
3660 .max_keysize = AES_MAX_KEY_SIZE,
3661 .ivsize = AES_BLOCK_SIZE,
3662 .setkey = chcr_aes_cbc_setkey,
3663 .encrypt = chcr_aes_encrypt,
3664 .decrypt = chcr_aes_decrypt,
3665 }
3666 }
3667 },
3668 {
3669 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_SUB_TYPE_XTS,
3670 .is_registered = 0,
3671 .alg.crypto = {
3672 .cra_name = "xts(aes)",
3673 .cra_driver_name = "xts-aes-chcr",
3674 .cra_blocksize = AES_BLOCK_SIZE,
3675 .cra_init = chcr_cra_init,
3676 .cra_exit = NULL,
3677 .cra_u .ablkcipher = {
3678 .min_keysize = 2 * AES_MIN_KEY_SIZE,
3679 .max_keysize = 2 * AES_MAX_KEY_SIZE,
3680 .ivsize = AES_BLOCK_SIZE,
3681 .setkey = chcr_aes_xts_setkey,
3682 .encrypt = chcr_aes_encrypt,
3683 .decrypt = chcr_aes_decrypt,
3684 }
3685 }
3686 },
3687 {
3688 .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_SUB_TYPE_CTR,
3689 .is_registered = 0,
3690 .alg.crypto = {
3691 .cra_name = "ctr(aes)",
3692 .cra_driver_name = "ctr-aes-chcr",
3693 .cra_blocksize = 1,
3694 .cra_init = chcr_cra_init,
3695 .cra_exit = chcr_cra_exit,
3696 .cra_u.ablkcipher = {
3697 .min_keysize = AES_MIN_KEY_SIZE,
3698 .max_keysize = AES_MAX_KEY_SIZE,
3699 .ivsize = AES_BLOCK_SIZE,
3700 .setkey = chcr_aes_ctr_setkey,
3701 .encrypt = chcr_aes_encrypt,
3702 .decrypt = chcr_aes_decrypt,
3703 }
3704 }
3705 },
3706 {
3707 .type = CRYPTO_ALG_TYPE_ABLKCIPHER |
3708 CRYPTO_ALG_SUB_TYPE_CTR_RFC3686,
3709 .is_registered = 0,
3710 .alg.crypto = {
3711 .cra_name = "rfc3686(ctr(aes))",
3712 .cra_driver_name = "rfc3686-ctr-aes-chcr",
3713 .cra_blocksize = 1,
3714 .cra_init = chcr_rfc3686_init,
3715 .cra_exit = chcr_cra_exit,
3716 .cra_u.ablkcipher = {
3717 .min_keysize = AES_MIN_KEY_SIZE +
3718 CTR_RFC3686_NONCE_SIZE,
3719 .max_keysize = AES_MAX_KEY_SIZE +
3720 CTR_RFC3686_NONCE_SIZE,
3721 .ivsize = CTR_RFC3686_IV_SIZE,
3722 .setkey = chcr_aes_rfc3686_setkey,
3723 .encrypt = chcr_aes_encrypt,
3724 .decrypt = chcr_aes_decrypt,
3725 .geniv = "seqiv",
3726 }
3727 }
3728 },

	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},

	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},

	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize = chcr_ccm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha1-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha256-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha224-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha384-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha512-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-digest_null-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
};
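
/*
 * Illustrative sketch, not part of the driver: how the table above is
 * consumed.  A generic name such as "sha256" resolves to whichever
 * registered implementation has the highest cra_priority, while the
 * cra_driver_name ("sha256-chcr") requests this offload explicitly.
 * The function name below is hypothetical.
 */
static __maybe_unused struct crypto_ahash *chcr_example_pick_sha256(void)
{
	/* Generic name: the crypto core selects by priority. */
	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);

	if (!IS_ERR(tfm))
		return tfm;

	/* Driver name: pin the chcr implementation specifically. */
	return crypto_alloc_ahash("sha256-chcr", 0, 0);
}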

static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			if (driver_algs[i].is_registered)
				crypto_unregister_alg(
						&driver_algs[i].alg.crypto);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered)
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered)
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
			break;
		}
		driver_algs[i].is_registered = 0;
	}
	return 0;
}

#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)

static int chcr_register_alg(void)
{
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			driver_algs[i].alg.crypto.cra_priority =
				CHCR_CRA_PRIORITY;
			driver_algs[i].alg.crypto.cra_module = THIS_MODULE;
			driver_algs[i].alg.crypto.cra_flags =
				CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.crypto.cra_ctxsize =
				sizeof(struct chcr_context) +
				sizeof(struct ablk_ctx);
			driver_algs[i].alg.crypto.cra_alignmask = 0;
			driver_algs[i].alg.crypto.cra_type =
				&crypto_ablkcipher_type;
			err = crypto_register_alg(&driver_algs[i].alg.crypto);
			name = driver_algs[i].alg.crypto.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags = CRYPTO_ALG_ASYNC;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			name = a_hash->halg.base.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("%s: Algorithm registration failed\n", name);
			goto register_err;
		}
		driver_algs[i].is_registered = 1;
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}

int start_crypto(void)
{
	return chcr_register_alg();
}

int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}