/*
 * Driver for the crypto block in Axis ARTPEC-6 and ARTPEC-7 SoCs,
 * exposed through the kernel asynchronous crypto API.
 */
#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/bitfield.h>
#include <linux/crypto.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/fault-inject.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/aes.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha.h>
#include <crypto/xts.h>

#define ARTPEC_CACHE_LINE_MAX	32

#define PDMA_OUT_CFG		0x0000
#define PDMA_OUT_BUF_CFG	0x0004
#define PDMA_OUT_CMD		0x0008
#define PDMA_OUT_DESCRQ_PUSH	0x0010
#define PDMA_OUT_DESCRQ_STAT	0x0014

#define A6_PDMA_IN_CFG		0x0028
#define A6_PDMA_IN_BUF_CFG	0x002c
#define A6_PDMA_IN_CMD		0x0030
#define A6_PDMA_IN_STATQ_PUSH	0x0038
#define A6_PDMA_IN_DESCRQ_PUSH	0x0044
#define A6_PDMA_IN_DESCRQ_STAT	0x0048
#define A6_PDMA_INTR_MASK	0x0068
#define A6_PDMA_ACK_INTR	0x006c
#define A6_PDMA_MASKED_INTR	0x0074

#define A7_PDMA_IN_CFG		0x002c
#define A7_PDMA_IN_BUF_CFG	0x0030
#define A7_PDMA_IN_CMD		0x0034
#define A7_PDMA_IN_STATQ_PUSH	0x003c
#define A7_PDMA_IN_DESCRQ_PUSH	0x0048
#define A7_PDMA_IN_DESCRQ_STAT	0x004C
#define A7_PDMA_INTR_MASK	0x006c
#define A7_PDMA_ACK_INTR	0x0070
#define A7_PDMA_MASKED_INTR	0x0078

#define PDMA_OUT_CFG_EN				BIT(0)

#define PDMA_OUT_BUF_CFG_DATA_BUF_SIZE		GENMASK(4, 0)
#define PDMA_OUT_BUF_CFG_DESCR_BUF_SIZE		GENMASK(9, 5)

#define PDMA_OUT_CMD_START			BIT(0)
#define A6_PDMA_OUT_CMD_STOP			BIT(3)
#define A7_PDMA_OUT_CMD_STOP			BIT(2)

#define PDMA_OUT_DESCRQ_PUSH_LEN		GENMASK(5, 0)
#define PDMA_OUT_DESCRQ_PUSH_ADDR		GENMASK(31, 6)

#define PDMA_OUT_DESCRQ_STAT_LEVEL		GENMASK(3, 0)
#define PDMA_OUT_DESCRQ_STAT_SIZE		GENMASK(7, 4)

#define PDMA_IN_CFG_EN				BIT(0)

#define PDMA_IN_BUF_CFG_DATA_BUF_SIZE		GENMASK(4, 0)
#define PDMA_IN_BUF_CFG_DESCR_BUF_SIZE		GENMASK(9, 5)
#define PDMA_IN_BUF_CFG_STAT_BUF_SIZE		GENMASK(14, 10)

#define PDMA_IN_CMD_START			BIT(0)
#define A6_PDMA_IN_CMD_FLUSH_STAT		BIT(2)
#define A6_PDMA_IN_CMD_STOP			BIT(3)
#define A7_PDMA_IN_CMD_FLUSH_STAT		BIT(1)
#define A7_PDMA_IN_CMD_STOP			BIT(2)

#define PDMA_IN_STATQ_PUSH_LEN			GENMASK(5, 0)
#define PDMA_IN_STATQ_PUSH_ADDR			GENMASK(31, 6)

#define PDMA_IN_DESCRQ_PUSH_LEN			GENMASK(5, 0)
#define PDMA_IN_DESCRQ_PUSH_ADDR		GENMASK(31, 6)

#define PDMA_IN_DESCRQ_STAT_LEVEL		GENMASK(3, 0)
#define PDMA_IN_DESCRQ_STAT_SIZE		GENMASK(7, 4)

#define A6_PDMA_INTR_MASK_IN_DATA		BIT(2)
#define A6_PDMA_INTR_MASK_IN_EOP		BIT(3)
#define A6_PDMA_INTR_MASK_IN_EOP_FLUSH		BIT(4)

#define A7_PDMA_INTR_MASK_IN_DATA		BIT(3)
#define A7_PDMA_INTR_MASK_IN_EOP		BIT(4)
#define A7_PDMA_INTR_MASK_IN_EOP_FLUSH		BIT(5)

#define A6_CRY_MD_OPER		GENMASK(19, 16)

#define A6_CRY_MD_HASH_SEL_CTX	GENMASK(21, 20)
#define A6_CRY_MD_HASH_HMAC_FIN	BIT(23)

#define A6_CRY_MD_CIPHER_LEN	GENMASK(21, 20)
#define A6_CRY_MD_CIPHER_DECR	BIT(22)
#define A6_CRY_MD_CIPHER_TWEAK	BIT(23)
#define A6_CRY_MD_CIPHER_DSEQ	BIT(24)

#define A7_CRY_MD_OPER		GENMASK(11, 8)

#define A7_CRY_MD_HASH_SEL_CTX	GENMASK(13, 12)
#define A7_CRY_MD_HASH_HMAC_FIN	BIT(15)

#define A7_CRY_MD_CIPHER_LEN	GENMASK(13, 12)
#define A7_CRY_MD_CIPHER_DECR	BIT(14)
#define A7_CRY_MD_CIPHER_TWEAK	BIT(15)
#define A7_CRY_MD_CIPHER_DSEQ	BIT(16)

#define regk_crypto_aes_cbc	0x00000002
#define regk_crypto_aes_ctr	0x00000003
#define regk_crypto_aes_ecb	0x00000001
#define regk_crypto_aes_gcm	0x00000004
#define regk_crypto_aes_xts	0x00000005
#define regk_crypto_cache	0x00000002
#define a6_regk_crypto_dlkey	0x0000000a
#define a7_regk_crypto_dlkey	0x0000000e
#define regk_crypto_ext		0x00000001
#define regk_crypto_hmac_sha1	0x00000007
#define regk_crypto_hmac_sha256	0x00000009
#define regk_crypto_hmac_sha384	0x0000000b
#define regk_crypto_hmac_sha512	0x0000000d
#define regk_crypto_init	0x00000000
#define regk_crypto_key_128	0x00000000
#define regk_crypto_key_192	0x00000001
#define regk_crypto_key_256	0x00000002
#define regk_crypto_null	0x00000000
#define regk_crypto_sha1	0x00000006
#define regk_crypto_sha256	0x00000008
#define regk_crypto_sha384	0x0000000a
#define regk_crypto_sha512	0x0000000c

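/* Hardware layout of the PDMA descriptors. The out channel uses either
 * full data descriptors (pointer + length) or short descriptors carrying
 * up to 7 bytes of immediate data.
 */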
struct pdma_descr_ctrl {
	unsigned char short_descr : 1;
	unsigned char pad1 : 1;
	unsigned char eop : 1;
	unsigned char intr : 1;
	unsigned char short_len : 3;
	unsigned char pad2 : 1;
} __packed;

struct pdma_data_descr {
	unsigned int len : 24;
	unsigned int buf : 32;
} __packed;

struct pdma_short_descr {
	unsigned char data[7];
} __packed;

struct pdma_descr {
	struct pdma_descr_ctrl ctrl;
	union {
		struct pdma_data_descr data;
		struct pdma_short_descr shrt;
	};
};

struct pdma_stat_descr {
	unsigned char pad1 : 1;
	unsigned char pad2 : 1;
	unsigned char eop : 1;
	unsigned char pad3 : 5;
	unsigned int len : 24;
};

#define PDMA_DESCR_COUNT	64

#define MODULE_NAME		"Artpec-6 CA"

/* Hash modes (including HMAC variants) */
#define ARTPEC6_CRYPTO_HASH_SHA1	1
#define ARTPEC6_CRYPTO_HASH_SHA256	2
#define ARTPEC6_CRYPTO_HASH_SHA384	3
#define ARTPEC6_CRYPTO_HASH_SHA512	4

/* Crypto modes */
#define ARTPEC6_CRYPTO_CIPHER_AES_ECB	1
#define ARTPEC6_CRYPTO_CIPHER_AES_CBC	2
#define ARTPEC6_CRYPTO_CIPHER_AES_CTR	3
#define ARTPEC6_CRYPTO_CIPHER_AES_XTS	5

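/* The PDMA is a DMA engine tightly coupled with the ciphering engine.
 * All payload to and from the engine moves through two PDMA channels: an
 * "out" channel carrying metadata records, keys and plaintext from memory,
 * and an "in" channel returning results and a final status word. Buffers
 * on the "in" side that do not cover whole cache lines are staged through
 * the bounce buffers below.
 */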
struct artpec6_crypto_bounce_buffer {
	struct list_head list;
	size_t length;
	struct scatterlist *sg;
	size_t offset;
	/* buf is aligned to ARTPEC_CACHE_LINE_MAX and
	 * holds up to ARTPEC_CACHE_LINE_MAX bytes of data.
	 */
	void *buf;
};

struct artpec6_crypto_dma_map {
	dma_addr_t dma_addr;
	size_t size;
	enum dma_data_direction dir;
};

struct artpec6_crypto_dma_descriptors {
	struct pdma_descr out[PDMA_DESCR_COUNT] __aligned(64);
	struct pdma_descr in[PDMA_DESCR_COUNT] __aligned(64);
	u32 stat[PDMA_DESCR_COUNT] __aligned(64);
	struct list_head bounce_buffers;

	struct artpec6_crypto_dma_map maps[PDMA_DESCR_COUNT * 2 + 2];
	dma_addr_t out_dma_addr;
	dma_addr_t in_dma_addr;
	dma_addr_t stat_dma_addr;
	size_t out_cnt;
	size_t in_cnt;
	size_t map_count;
};

enum artpec6_crypto_variant {
	ARTPEC6_CRYPTO,
	ARTPEC7_CRYPTO,
};

struct artpec6_crypto {
	void __iomem *base;
	spinlock_t queue_lock;
	struct list_head queue;
	struct list_head pending;
	struct tasklet_struct task;
	struct kmem_cache *dma_cache;
	int pending_count;
	struct timer_list timer;
	enum artpec6_crypto_variant variant;
	void *pad_buffer;
	void *zero_buffer;
};

enum artpec6_crypto_hash_flags {
	HASH_FLAG_INIT_CTX = 2,
	HASH_FLAG_UPDATE = 4,
	HASH_FLAG_FINALIZE = 8,
	HASH_FLAG_HMAC = 16,
	HASH_FLAG_UPDATE_KEY = 32,
};

struct artpec6_crypto_req_common {
	struct list_head list;
	struct artpec6_crypto_dma_descriptors *dma;
	struct crypto_async_request *req;
	void (*complete)(struct crypto_async_request *req);
	gfp_t gfp_flags;
};

struct artpec6_hash_request_context {
	char partial_buffer[SHA512_BLOCK_SIZE];
	char partial_buffer_out[SHA512_BLOCK_SIZE];
	char key_buffer[SHA512_BLOCK_SIZE];
	char pad_buffer[SHA512_BLOCK_SIZE + 32];
	unsigned char digeststate[SHA512_DIGEST_SIZE];
	size_t partial_bytes;
	u64 digcnt;
	u32 key_md;
	u32 hash_md;
	enum artpec6_crypto_hash_flags hash_flags;
	struct artpec6_crypto_req_common common;
};

struct artpec6_hash_export_state {
	char partial_buffer[SHA512_BLOCK_SIZE];
	unsigned char digeststate[SHA512_DIGEST_SIZE];
	size_t partial_bytes;
	u64 digcnt;
	int oper;
	unsigned int hash_flags;
};

struct artpec6_hashalg_context {
	char hmac_key[SHA512_BLOCK_SIZE];
	size_t hmac_key_length;
	struct crypto_shash *child_hash;
};

struct artpec6_crypto_request_context {
	u32 cipher_md;
	bool decrypt;
	struct artpec6_crypto_req_common common;
};

struct artpec6_cryptotfm_context {
	unsigned char aes_key[2*AES_MAX_KEY_SIZE];
	size_t key_length;
	u32 key_md;
	int crypto_type;
	struct crypto_skcipher *fallback;
};

struct artpec6_crypto_aead_hw_ctx {
	__be64 aad_length_bits;
	__be64 text_length_bits;
	__u8 J0[AES_BLOCK_SIZE];
};

struct artpec6_crypto_aead_req_ctx {
	struct artpec6_crypto_aead_hw_ctx hw_ctx;
	u32 cipher_md;
	bool decrypt;
	struct artpec6_crypto_req_common common;
	__u8 decryption_tag[AES_BLOCK_SIZE] ____cacheline_aligned;
};

static struct device *artpec6_crypto_dev;

#ifdef CONFIG_FAULT_INJECTION
static DECLARE_FAULT_ATTR(artpec6_crypto_fail_status_read);
static DECLARE_FAULT_ATTR(artpec6_crypto_fail_dma_array_full);
#endif

enum {
	ARTPEC6_CRYPTO_PREPARE_HASH_NO_START,
	ARTPEC6_CRYPTO_PREPARE_HASH_START,
};

static int artpec6_crypto_prepare_aead(struct aead_request *areq);
static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq);
static int artpec6_crypto_prepare_hash(struct ahash_request *areq);

static void
artpec6_crypto_complete_crypto(struct crypto_async_request *req);
static void
artpec6_crypto_complete_cbc_encrypt(struct crypto_async_request *req);
static void
artpec6_crypto_complete_cbc_decrypt(struct crypto_async_request *req);
static void
artpec6_crypto_complete_aead(struct crypto_async_request *req);
static void
artpec6_crypto_complete_hash(struct crypto_async_request *req);

static int
artpec6_crypto_common_destroy(struct artpec6_crypto_req_common *common);

static void
artpec6_crypto_start_dma(struct artpec6_crypto_req_common *common);

struct artpec6_crypto_walk {
	struct scatterlist *sg;
	size_t offset;
};

static void artpec6_crypto_walk_init(struct artpec6_crypto_walk *awalk,
				     struct scatterlist *sg)
{
	awalk->sg = sg;
	awalk->offset = 0;
}

static size_t artpec6_crypto_walk_advance(struct artpec6_crypto_walk *awalk,
					  size_t nbytes)
{
	while (nbytes && awalk->sg) {
		size_t piece;

		WARN_ON(awalk->offset > awalk->sg->length);

		piece = min(nbytes, (size_t)awalk->sg->length - awalk->offset);
		nbytes -= piece;
		awalk->offset += piece;
		if (awalk->offset == awalk->sg->length) {
			awalk->sg = sg_next(awalk->sg);
			awalk->offset = 0;
		}
	}

	return nbytes;
}

static size_t
artpec6_crypto_walk_chunklen(const struct artpec6_crypto_walk *awalk)
{
	WARN_ON(awalk->sg->length == awalk->offset);

	return awalk->sg->length - awalk->offset;
}

static dma_addr_t
artpec6_crypto_walk_chunk_phys(const struct artpec6_crypto_walk *awalk)
{
	return sg_phys(awalk->sg) + awalk->offset;
}

static void
artpec6_crypto_copy_bounce_buffers(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct artpec6_crypto_bounce_buffer *b;
	struct artpec6_crypto_bounce_buffer *next;

	list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) {
		pr_debug("bounce entry %p: %zu bytes @ %zu from %p\n",
			 b, b->length, b->offset, b->buf);
		sg_pcopy_from_buffer(b->sg,
				     1,
				     b->buf,
				     b->length,
				     b->offset);

		list_del(&b->list);
		kfree(b);
	}
}

static inline bool artpec6_crypto_busy(void)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	int fifo_count = ac->pending_count;

	return fifo_count > 6;
}

static int artpec6_crypto_submit(struct artpec6_crypto_req_common *req)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	int ret = -EBUSY;

	spin_lock_bh(&ac->queue_lock);

	if (!artpec6_crypto_busy()) {
		list_add_tail(&req->list, &ac->pending);
		artpec6_crypto_start_dma(req);
		ret = -EINPROGRESS;
	} else if (req->req->flags & CRYPTO_TFM_REQ_MAY_BACKLOG) {
		list_add_tail(&req->list, &ac->queue);
	} else {
		artpec6_crypto_common_destroy(req);
	}

	spin_unlock_bh(&ac->queue_lock);

	return ret;
}

static void artpec6_crypto_start_dma(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	u32 ind, statd, outd;

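	/* Make the descriptor arrays visible to the DMA engine before the
	 * queue pushes below hand them over to the hardware.
	 */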
	wmb();

	ind = FIELD_PREP(PDMA_IN_DESCRQ_PUSH_LEN, dma->in_cnt - 1) |
	      FIELD_PREP(PDMA_IN_DESCRQ_PUSH_ADDR, dma->in_dma_addr >> 6);

	statd = FIELD_PREP(PDMA_IN_STATQ_PUSH_LEN, dma->in_cnt - 1) |
		FIELD_PREP(PDMA_IN_STATQ_PUSH_ADDR, dma->stat_dma_addr >> 6);

	outd = FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_LEN, dma->out_cnt - 1) |
	       FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_ADDR, dma->out_dma_addr >> 6);

	if (variant == ARTPEC6_CRYPTO) {
		writel_relaxed(ind, base + A6_PDMA_IN_DESCRQ_PUSH);
		writel_relaxed(statd, base + A6_PDMA_IN_STATQ_PUSH);
		writel_relaxed(PDMA_IN_CMD_START, base + A6_PDMA_IN_CMD);
	} else {
		writel_relaxed(ind, base + A7_PDMA_IN_DESCRQ_PUSH);
		writel_relaxed(statd, base + A7_PDMA_IN_STATQ_PUSH);
		writel_relaxed(PDMA_IN_CMD_START, base + A7_PDMA_IN_CMD);
	}

	writel_relaxed(outd, base + PDMA_OUT_DESCRQ_PUSH);
	writel_relaxed(PDMA_OUT_CMD_START, base + PDMA_OUT_CMD);

	ac->pending_count++;
}

static void
artpec6_crypto_init_dma_operation(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;

	dma->out_cnt = 0;
	dma->in_cnt = 0;
	dma->map_count = 0;
	INIT_LIST_HEAD(&dma->bounce_buffers);
}

static bool fault_inject_dma_descr(void)
{
#ifdef CONFIG_FAULT_INJECTION
	return should_fail(&artpec6_crypto_fail_dma_array_full, 1);
#else
	return false;
#endif
}

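/** artpec6_crypto_setup_out_descr_phys - Setup an out channel with a
 *                                        physical address
 *
 * @addr: The physical address of the data buffer
 * @len:  The length of the data buffer
 * @eop:  True if this is the last buffer in the packet
 *
 * @return 0 on success or -ENOSPC if there are no more descriptors available
 */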
static int
artpec6_crypto_setup_out_descr_phys(struct artpec6_crypto_req_common *common,
				    dma_addr_t addr, size_t len, bool eop)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (dma->out_cnt >= PDMA_DESCR_COUNT ||
	    fault_inject_dma_descr()) {
		pr_err("No free OUT DMA descriptors available!\n");
		return -ENOSPC;
	}

	d = &dma->out[dma->out_cnt++];
	memset(d, 0, sizeof(*d));

	d->ctrl.short_descr = 0;
	d->ctrl.eop = eop;
	d->data.len = len;
	d->data.buf = addr;
	return 0;
}

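/** artpec6_crypto_setup_out_descr_short - Setup a short out descriptor
 *
 * @dst: The virtual address of the data
 * @len: The length of the data, must be between 1 and 7 bytes
 * @eop: True if this is the last buffer in the packet
 *
 * @return 0 on success
 *	-ENOSPC if no more descriptors are available
 *	-EINVAL if the data length is out of range
 */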
static int
artpec6_crypto_setup_out_descr_short(struct artpec6_crypto_req_common *common,
				     void *dst, unsigned int len, bool eop)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (dma->out_cnt >= PDMA_DESCR_COUNT ||
	    fault_inject_dma_descr()) {
		pr_err("No free OUT DMA descriptors available!\n");
		return -ENOSPC;
	} else if (len > 7 || len < 1) {
		return -EINVAL;
	}
	d = &dma->out[dma->out_cnt++];
	memset(d, 0, sizeof(*d));

	d->ctrl.short_descr = 1;
	d->ctrl.short_len = len;
	d->ctrl.eop = eop;
	memcpy(d->shrt.data, dst, len);
	return 0;
}

static int artpec6_crypto_dma_map_page(struct artpec6_crypto_req_common *common,
				       struct page *page, size_t offset,
				       size_t size,
				       enum dma_data_direction dir,
				       dma_addr_t *dma_addr_out)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct device *dev = artpec6_crypto_dev;
	struct artpec6_crypto_dma_map *map;
	dma_addr_t dma_addr;

	*dma_addr_out = 0;

	if (dma->map_count >= ARRAY_SIZE(dma->maps))
		return -ENOMEM;

	dma_addr = dma_map_page(dev, page, offset, size, dir);
	if (dma_mapping_error(dev, dma_addr))
		return -ENOMEM;

	map = &dma->maps[dma->map_count++];
	map->size = size;
	map->dma_addr = dma_addr;
	map->dir = dir;

	*dma_addr_out = dma_addr;

	return 0;
}

static int
artpec6_crypto_dma_map_single(struct artpec6_crypto_req_common *common,
			      void *ptr, size_t size,
			      enum dma_data_direction dir,
			      dma_addr_t *dma_addr_out)
{
	struct page *page = virt_to_page(ptr);
	size_t offset = (uintptr_t)ptr & ~PAGE_MASK;

	return artpec6_crypto_dma_map_page(common, page, offset, size, dir,
					   dma_addr_out);
}

static int
artpec6_crypto_dma_map_descs(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	int ret;

	ret = artpec6_crypto_dma_map_single(common, dma->in,
				sizeof(dma->in[0]) * dma->in_cnt,
				DMA_TO_DEVICE, &dma->in_dma_addr);
	if (ret)
		return ret;

	ret = artpec6_crypto_dma_map_single(common, dma->out,
				sizeof(dma->out[0]) * dma->out_cnt,
				DMA_TO_DEVICE, &dma->out_dma_addr);
	if (ret)
		return ret;

	/* We only read back the final stat descriptor */
	dma->stat[dma->in_cnt - 1] = 0;

	/* DMA_BIDIRECTIONAL since the CPU zeroes the stat word here and the
	 * hardware writes it back on completion.
	 */
	return artpec6_crypto_dma_map_single(common,
				dma->stat + dma->in_cnt - 1,
				sizeof(dma->stat[0]),
				DMA_BIDIRECTIONAL,
				&dma->stat_dma_addr);
}

static void
artpec6_crypto_dma_unmap_all(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct device *dev = artpec6_crypto_dev;
	int i;

	for (i = 0; i < dma->map_count; i++) {
		struct artpec6_crypto_dma_map *map = &dma->maps[i];

		dma_unmap_page(dev, map->dma_addr, map->size, map->dir);
	}

	dma->map_count = 0;
}

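/** artpec6_crypto_setup_out_descr - Setup an out descriptor
 *
 * @dst: The virtual address of the data
 * @len: The length of the data
 * @eop: True if this is the last buffer in the packet
 * @use_short: If this is true and the data length is 6 bytes or less then
 *	a short descriptor will be used
 *
 * @return 0 on success
 *	Any errors from artpec6_crypto_setup_out_descr_short() or
 *	artpec6_crypto_setup_out_descr_phys()
 */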
static int
artpec6_crypto_setup_out_descr(struct artpec6_crypto_req_common *common,
			       void *dst, unsigned int len, bool eop,
			       bool use_short)
{
	if (use_short && len < 7) {
		return artpec6_crypto_setup_out_descr_short(common, dst, len,
							    eop);
	} else {
		int ret;
		dma_addr_t dma_addr;

		ret = artpec6_crypto_dma_map_single(common, dst, len,
						    DMA_TO_DEVICE,
						    &dma_addr);
		if (ret)
			return ret;

		return artpec6_crypto_setup_out_descr_phys(common, dma_addr,
							   len, eop);
	}
}

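/** artpec6_crypto_setup_in_descr_phys - Setup an in channel with a
 *                                       physical address
 *
 * @addr: The physical address of the data buffer
 * @len:  The length of the data buffer
 * @intr: True if an interrupt should be fired after HW processing of this
 *	  descriptor
 *
 * @return 0 on success or -ENOSPC if there are no more descriptors available
 */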
static int
artpec6_crypto_setup_in_descr_phys(struct artpec6_crypto_req_common *common,
				   dma_addr_t addr, unsigned int len, bool intr)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (dma->in_cnt >= PDMA_DESCR_COUNT ||
	    fault_inject_dma_descr()) {
		pr_err("No free IN DMA descriptors available!\n");
		return -ENOSPC;
	}
	d = &dma->in[dma->in_cnt++];
	memset(d, 0, sizeof(*d));

	d->ctrl.intr = intr;
	d->data.len = len;
	d->data.buf = addr;
	return 0;
}

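/** artpec6_crypto_setup_in_descr - Setup an in channel descriptor
 *
 * @buffer: The virtual address of the data buffer
 * @len:    The length of the data buffer
 * @last:   If this is the last data buffer in the request (i.e. an interrupt
 *	    is needed)
 *
 * Short descriptors are not used for the in channel.
 */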
static int
artpec6_crypto_setup_in_descr(struct artpec6_crypto_req_common *common,
			      void *buffer, unsigned int len, bool last)
{
	dma_addr_t dma_addr;
	int ret;

	ret = artpec6_crypto_dma_map_single(common, buffer, len,
					    DMA_FROM_DEVICE, &dma_addr);
	if (ret)
		return ret;

	return artpec6_crypto_setup_in_descr_phys(common, dma_addr, len, last);
}

static struct artpec6_crypto_bounce_buffer *
artpec6_crypto_alloc_bounce(gfp_t flags)
{
	void *base;
	size_t alloc_size = sizeof(struct artpec6_crypto_bounce_buffer) +
			    2 * ARTPEC_CACHE_LINE_MAX;
	struct artpec6_crypto_bounce_buffer *bbuf = kzalloc(alloc_size, flags);

	if (!bbuf)
		return NULL;

	base = bbuf + 1;
	bbuf->buf = PTR_ALIGN(base, ARTPEC_CACHE_LINE_MAX);
	return bbuf;
}

static int setup_bounce_buffer_in(struct artpec6_crypto_req_common *common,
				  struct artpec6_crypto_walk *walk, size_t size)
{
	struct artpec6_crypto_bounce_buffer *bbuf;
	int ret;

	bbuf = artpec6_crypto_alloc_bounce(common->gfp_flags);
	if (!bbuf)
		return -ENOMEM;

	bbuf->length = size;
	bbuf->sg = walk->sg;
	bbuf->offset = walk->offset;

	ret = artpec6_crypto_setup_in_descr(common, bbuf->buf, size, false);
	if (ret) {
		kfree(bbuf);
		return ret;
	}

	pr_debug("BOUNCE %zu offset %zu\n", size, walk->offset);
	list_add_tail(&bbuf->list, &common->dma->bounce_buffers);
	return 0;
}

static int
artpec6_crypto_setup_sg_descrs_in(struct artpec6_crypto_req_common *common,
				  struct artpec6_crypto_walk *walk,
				  size_t count)
{
	size_t chunk;
	int ret;
	dma_addr_t addr;

	while (walk->sg && count) {
		chunk = min(count, artpec6_crypto_walk_chunklen(walk));
		addr = artpec6_crypto_walk_chunk_phys(walk);

		/* When destination buffers are not aligned to the cache line
		 * size we need bounce buffers. The DMA-API requires that the
		 * entire line is owned by the DMA buffer, and this holds also
		 * when coherent DMA is used.
		 */
		if (!IS_ALIGNED(addr, ARTPEC_CACHE_LINE_MAX)) {
			chunk = min_t(dma_addr_t, chunk,
				      ALIGN(addr, ARTPEC_CACHE_LINE_MAX) -
				      addr);

			pr_debug("CHUNK-b %pad:%zu\n", &addr, chunk);
			ret = setup_bounce_buffer_in(common, walk, chunk);
		} else if (chunk < ARTPEC_CACHE_LINE_MAX) {
			pr_debug("CHUNK-b %pad:%zu\n", &addr, chunk);
			ret = setup_bounce_buffer_in(common, walk, chunk);
		} else {
			dma_addr_t dma_addr;

			chunk = chunk & ~(ARTPEC_CACHE_LINE_MAX-1);

			pr_debug("CHUNK %pad:%zu\n", &addr, chunk);

			ret = artpec6_crypto_dma_map_page(common,
							  sg_page(walk->sg),
							  walk->sg->offset +
							  walk->offset,
							  chunk,
							  DMA_FROM_DEVICE,
							  &dma_addr);
			if (ret)
				return ret;

			ret = artpec6_crypto_setup_in_descr_phys(common,
								 dma_addr,
								 chunk, false);
		}

		if (ret)
			return ret;

		count = count - chunk;
		artpec6_crypto_walk_advance(walk, chunk);
	}

	if (count)
		pr_err("EOL unexpected %zu bytes left\n", count);

	return count ? -EINVAL : 0;
}

static int
artpec6_crypto_setup_sg_descrs_out(struct artpec6_crypto_req_common *common,
				   struct artpec6_crypto_walk *walk,
				   size_t count)
{
	size_t chunk;
	int ret;
	dma_addr_t addr;

	while (walk->sg && count) {
		chunk = min(count, artpec6_crypto_walk_chunklen(walk));
		addr = artpec6_crypto_walk_chunk_phys(walk);

		pr_debug("OUT-CHUNK %pad:%zu\n", &addr, chunk);

		if (addr & 3) {
			char buf[3];

			chunk = min_t(size_t, chunk, (4-(addr&3)));

			sg_pcopy_to_buffer(walk->sg, 1, buf, chunk,
					   walk->offset);

			ret = artpec6_crypto_setup_out_descr_short(common, buf,
								   chunk,
								   false);
		} else {
			dma_addr_t dma_addr;

			ret = artpec6_crypto_dma_map_page(common,
							  sg_page(walk->sg),
							  walk->sg->offset +
							  walk->offset,
							  chunk,
							  DMA_TO_DEVICE,
							  &dma_addr);
			if (ret)
				return ret;

			ret = artpec6_crypto_setup_out_descr_phys(common,
								  dma_addr,
								  chunk, false);
		}

		if (ret)
			return ret;

		count = count - chunk;
		artpec6_crypto_walk_advance(walk, chunk);
	}

	if (count)
		pr_err("EOL unexpected %zu bytes left\n", count);

	return count ? -EINVAL : 0;
}

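/** artpec6_crypto_terminate_out_descrs - Set the EOP on the last out descriptor
 *
 * @return 0 on success
 *	-EINVAL if no out descriptors are available
 */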
static int
artpec6_crypto_terminate_out_descrs(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (!dma->out_cnt || dma->out_cnt > PDMA_DESCR_COUNT) {
		pr_err("%s: OUT descriptor list is %s\n",
		       MODULE_NAME, dma->out_cnt ? "full" : "empty");
		return -EINVAL;
	}

	d = &dma->out[dma->out_cnt-1];
	d->ctrl.eop = 1;

	return 0;
}

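/** artpec6_crypto_terminate_in_descrs - Set the interrupt flag on the last
 *                                       in descriptor
 *
 * @return 0 on success
 *	-EINVAL if no in descriptors are available
 */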
static int
artpec6_crypto_terminate_in_descrs(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (!dma->in_cnt || dma->in_cnt > PDMA_DESCR_COUNT) {
		pr_err("%s: IN descriptor list is %s\n",
		       MODULE_NAME, dma->in_cnt ? "full" : "empty");
		return -EINVAL;
	}

	d = &dma->in[dma->in_cnt-1];
	d->ctrl.intr = 1;
	return 0;
}

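/** create_hash_pad - Create the padding for a hash digest
 *
 * @oper:     The hardware operation code, used to select the pad layout
 * @dst:      The destination buffer to write the pad to
 * @dgstlen:  The number of bytes hashed so far
 * @bitcount: The length in bits to encode in the pad trailer
 *
 * @return The total number of pad bytes written to @dst
 */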
static size_t
create_hash_pad(int oper, unsigned char *dst, u64 dgstlen, u64 bitcount)
{
	unsigned int mod, target, diff, pad_bytes, size_bytes;
	__be64 bits = __cpu_to_be64(bitcount);

	switch (oper) {
	case regk_crypto_sha1:
	case regk_crypto_sha256:
	case regk_crypto_hmac_sha1:
	case regk_crypto_hmac_sha256:
		target = 448 / 8;
		mod = 512 / 8;
		size_bytes = 8;
		break;
	default:
		target = 896 / 8;
		mod = 1024 / 8;
		size_bytes = 16;
		break;
	}

	target -= 1;
	diff = dgstlen & (mod - 1);
	pad_bytes = diff > target ? target + mod - diff : target - diff;

	memset(dst + 1, 0, pad_bytes);
	dst[0] = 0x80;

	if (size_bytes == 16) {
		memset(dst + 1 + pad_bytes, 0, 8);
		memcpy(dst + 1 + pad_bytes + 8, &bits, 8);
	} else {
		memcpy(dst + 1 + pad_bytes, &bits, 8);
	}

	return pad_bytes + size_bytes + 1;
}

static int artpec6_crypto_common_init(struct artpec6_crypto_req_common *common,
		struct crypto_async_request *parent,
		void (*complete)(struct crypto_async_request *req),
		struct scatterlist *dstsg, unsigned int nbytes)
{
	gfp_t flags;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);

	flags = (parent->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		 GFP_KERNEL : GFP_ATOMIC;

	common->gfp_flags = flags;
	common->dma = kmem_cache_alloc(ac->dma_cache, flags);
	if (!common->dma)
		return -ENOMEM;

	common->req = parent;
	common->complete = complete;
	return 0;
}

static void
artpec6_crypto_bounce_destroy(struct artpec6_crypto_dma_descriptors *dma)
{
	struct artpec6_crypto_bounce_buffer *b;
	struct artpec6_crypto_bounce_buffer *next;

	list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) {
		kfree(b);
	}
}

static int
artpec6_crypto_common_destroy(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);

	artpec6_crypto_dma_unmap_all(common);
	artpec6_crypto_bounce_destroy(common->dma);
	kmem_cache_free(ac->dma_cache, common->dma);
	common->dma = NULL;
	return 0;
}

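/*
 * Ciphering functions.
 */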
static int artpec6_crypto_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
	struct artpec6_crypto_request_context *req_ctx = NULL;
	void (*complete)(struct crypto_async_request *req);
	int ret;

	req_ctx = skcipher_request_ctx(req);

	switch (ctx->crypto_type) {
	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
	case ARTPEC6_CRYPTO_CIPHER_AES_ECB:
	case ARTPEC6_CRYPTO_CIPHER_AES_XTS:
		req_ctx->decrypt = 0;
		break;
	default:
		break;
	}

	switch (ctx->crypto_type) {
	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
		complete = artpec6_crypto_complete_cbc_encrypt;
		break;
	default:
		complete = artpec6_crypto_complete_crypto;
		break;
	}

	ret = artpec6_crypto_common_init(&req_ctx->common,
					 &req->base,
					 complete,
					 req->dst, req->cryptlen);
	if (ret)
		return ret;

	ret = artpec6_crypto_prepare_crypto(req);
	if (ret) {
		artpec6_crypto_common_destroy(&req_ctx->common);
		return ret;
	}

	return artpec6_crypto_submit(&req_ctx->common);
}

static int artpec6_crypto_decrypt(struct skcipher_request *req)
{
	int ret;
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
	struct artpec6_crypto_request_context *req_ctx = NULL;
	void (*complete)(struct crypto_async_request *req);

	req_ctx = skcipher_request_ctx(req);

	switch (ctx->crypto_type) {
	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
	case ARTPEC6_CRYPTO_CIPHER_AES_ECB:
	case ARTPEC6_CRYPTO_CIPHER_AES_XTS:
		req_ctx->decrypt = 1;
		break;
	default:
		break;
	}

	switch (ctx->crypto_type) {
	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
		complete = artpec6_crypto_complete_cbc_decrypt;
		break;
	default:
		complete = artpec6_crypto_complete_crypto;
		break;
	}

	ret = artpec6_crypto_common_init(&req_ctx->common, &req->base,
					 complete,
					 req->dst, req->cryptlen);
	if (ret)
		return ret;

	ret = artpec6_crypto_prepare_crypto(req);
	if (ret) {
		artpec6_crypto_common_destroy(&req_ctx->common);
		return ret;
	}

	return artpec6_crypto_submit(&req_ctx->common);
}

static int
artpec6_crypto_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)
					    (req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			     AES_BLOCK_SIZE;

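	/* The hardware uses only the last 32 bits of the IV as a counter,
	 * while the kernel tests (e.g. aes_ctr_enc_tv_template[4]) expect
	 * the whole IV to act as one. Fall back to the software
	 * implementation if the counter is going to overflow.
	 */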
	if (counter + nblks < counter) {
		int ret;

		pr_debug("counter %x will overflow (nblks %u), falling back\n",
			 counter, nblks);

		ret = crypto_skcipher_setkey(ctx->fallback, ctx->aes_key,
					     ctx->key_length);
		if (ret)
			return ret;

		{
			SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);

			skcipher_request_set_tfm(subreq, ctx->fallback);
			skcipher_request_set_callback(subreq, req->base.flags,
						      NULL, NULL);
			skcipher_request_set_crypt(subreq, req->src, req->dst,
						   req->cryptlen, req->iv);
			ret = encrypt ? crypto_skcipher_encrypt(subreq)
				      : crypto_skcipher_decrypt(subreq);
			skcipher_request_zero(subreq);
		}
		return ret;
	}

	return encrypt ? artpec6_crypto_encrypt(req)
		       : artpec6_crypto_decrypt(req);
}

static int artpec6_crypto_ctr_encrypt(struct skcipher_request *req)
{
	return artpec6_crypto_ctr_crypt(req, true);
}

static int artpec6_crypto_ctr_decrypt(struct skcipher_request *req)
{
	return artpec6_crypto_ctr_crypt(req, false);
}

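/*
 * AEAD functions (AES-GCM).
 */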
static int artpec6_crypto_aead_init(struct crypto_aead *tfm)
{
	struct artpec6_cryptotfm_context *tfm_ctx = crypto_aead_ctx(tfm);

	memset(tfm_ctx, 0, sizeof(*tfm_ctx));

	crypto_aead_set_reqsize(tfm,
				sizeof(struct artpec6_crypto_aead_req_ctx));

	return 0;
}

static int artpec6_crypto_aead_set_key(struct crypto_aead *tfm, const u8 *key,
				       unsigned int len)
{
	struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(&tfm->base);

	if (len != 16 && len != 24 && len != 32) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx->key_length = len;

	memcpy(ctx->aes_key, key, len);
	return 0;
}

static int artpec6_crypto_aead_encrypt(struct aead_request *req)
{
	int ret;
	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req);

	req_ctx->decrypt = false;
	ret = artpec6_crypto_common_init(&req_ctx->common, &req->base,
					 artpec6_crypto_complete_aead,
					 NULL, 0);
	if (ret)
		return ret;

	ret = artpec6_crypto_prepare_aead(req);
	if (ret) {
		artpec6_crypto_common_destroy(&req_ctx->common);
		return ret;
	}

	return artpec6_crypto_submit(&req_ctx->common);
}

static int artpec6_crypto_aead_decrypt(struct aead_request *req)
{
	int ret;
	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req);

	req_ctx->decrypt = true;
	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	ret = artpec6_crypto_common_init(&req_ctx->common,
					 &req->base,
					 artpec6_crypto_complete_aead,
					 NULL, 0);
	if (ret)
		return ret;

	ret = artpec6_crypto_prepare_aead(req);
	if (ret) {
		artpec6_crypto_common_destroy(&req_ctx->common);
		return ret;
	}

	return artpec6_crypto_submit(&req_ctx->common);
}

static int artpec6_crypto_prepare_hash(struct ahash_request *areq)
{
	struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm);
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq);
	size_t digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	size_t contextsize = digestsize == SHA384_DIGEST_SIZE ?
		SHA512_DIGEST_SIZE : digestsize;
	size_t blocksize = crypto_tfm_alg_blocksize(
		crypto_ahash_tfm(crypto_ahash_reqtfm(areq)));
	struct artpec6_crypto_req_common *common = &req_ctx->common;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;
	u32 sel_ctx;
	bool ext_ctx = false;
	bool run_hw = false;
	int error = 0;

	artpec6_crypto_init_dma_operation(common);

	/* Upload the HMAC key; it must be the first packet */
	if (req_ctx->hash_flags & HASH_FLAG_HMAC) {
		if (variant == ARTPEC6_CRYPTO) {
			req_ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER,
						     a6_regk_crypto_dlkey);
		} else {
			req_ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER,
						     a7_regk_crypto_dlkey);
		}

		/* Copy and pad up the key */
		memcpy(req_ctx->key_buffer, ctx->hmac_key,
		       ctx->hmac_key_length);
		memset(req_ctx->key_buffer + ctx->hmac_key_length, 0,
		       blocksize - ctx->hmac_key_length);

		error = artpec6_crypto_setup_out_descr(common,
					(void *)&req_ctx->key_md,
					sizeof(req_ctx->key_md), false, false);
		if (error)
			return error;

		error = artpec6_crypto_setup_out_descr(common,
					req_ctx->key_buffer, blocksize,
					true, false);
		if (error)
			return error;
	}

	if (!(req_ctx->hash_flags & HASH_FLAG_INIT_CTX)) {
		/* Restore the hash context from a previous operation */
		sel_ctx = regk_crypto_ext;
		ext_ctx = true;
	} else {
		sel_ctx = regk_crypto_init;
	}

	if (variant == ARTPEC6_CRYPTO) {
		req_ctx->hash_md &= ~A6_CRY_MD_HASH_SEL_CTX;
		req_ctx->hash_md |= FIELD_PREP(A6_CRY_MD_HASH_SEL_CTX, sel_ctx);

		/* If this is the final round, set the final flag */
		if (req_ctx->hash_flags & HASH_FLAG_FINALIZE)
			req_ctx->hash_md |= A6_CRY_MD_HASH_HMAC_FIN;
	} else {
		req_ctx->hash_md &= ~A7_CRY_MD_HASH_SEL_CTX;
		req_ctx->hash_md |= FIELD_PREP(A7_CRY_MD_HASH_SEL_CTX, sel_ctx);

		/* If this is the final round, set the final flag */
		if (req_ctx->hash_flags & HASH_FLAG_FINALIZE)
			req_ctx->hash_md |= A7_CRY_MD_HASH_HMAC_FIN;
	}

	/* Send the metadata */
	error = artpec6_crypto_setup_out_descr(common,
				(void *)&req_ctx->hash_md,
				sizeof(req_ctx->hash_md), false, false);
	if (error)
		return error;

	error = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
	if (error)
		return error;

	if (ext_ctx) {
		error = artpec6_crypto_setup_out_descr(common,
					req_ctx->digeststate,
					contextsize, false, false);

		if (error)
			return error;
	}

	if (req_ctx->hash_flags & HASH_FLAG_UPDATE) {
		size_t done_bytes = 0;
		size_t total_bytes = areq->nbytes + req_ctx->partial_bytes;
		size_t ready_bytes = round_down(total_bytes, blocksize);
		struct artpec6_crypto_walk walk;

		run_hw = ready_bytes > 0;
		if (req_ctx->partial_bytes && ready_bytes) {
			/* We have a partial buffer and will send at least
			 * some bytes to the HW. Empty this partial buffer
			 * before tackling the SG lists.
			 */
			memcpy(req_ctx->partial_buffer_out,
			       req_ctx->partial_buffer,
			       req_ctx->partial_bytes);

			error = artpec6_crypto_setup_out_descr(common,
						req_ctx->partial_buffer_out,
						req_ctx->partial_bytes,
						false, true);
			if (error)
				return error;

			/* Reset the partial buffer */
			done_bytes += req_ctx->partial_bytes;
			req_ctx->partial_bytes = 0;
		}

		artpec6_crypto_walk_init(&walk, areq->src);

		error = artpec6_crypto_setup_sg_descrs_out(common, &walk,
							   ready_bytes -
							   done_bytes);
		if (error)
			return error;

		if (walk.sg) {
			size_t sg_skip = ready_bytes - done_bytes;
			size_t sg_rem = areq->nbytes - sg_skip;

			sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
					   req_ctx->partial_buffer +
					   req_ctx->partial_bytes,
					   sg_rem, sg_skip);

			req_ctx->partial_bytes += sg_rem;
		}

		req_ctx->digcnt += ready_bytes;
		req_ctx->hash_flags &= ~(HASH_FLAG_UPDATE);
	}

	/* Finalize */
	if (req_ctx->hash_flags & HASH_FLAG_FINALIZE) {
		bool needtrim = contextsize != digestsize;
		size_t hash_pad_len;
		u64 digest_bits;
		u32 oper;

		if (variant == ARTPEC6_CRYPTO)
			oper = FIELD_GET(A6_CRY_MD_OPER, req_ctx->hash_md);
		else
			oper = FIELD_GET(A7_CRY_MD_OPER, req_ctx->hash_md);

		/* Write out any partial data first */
		if (req_ctx->partial_bytes) {
			memcpy(req_ctx->partial_buffer_out,
			       req_ctx->partial_buffer,
			       req_ctx->partial_bytes);
			error = artpec6_crypto_setup_out_descr(common,
						req_ctx->partial_buffer_out,
						req_ctx->partial_bytes,
						false, true);
			if (error)
				return error;

			req_ctx->digcnt += req_ctx->partial_bytes;
			req_ctx->partial_bytes = 0;
		}

		if (req_ctx->hash_flags & HASH_FLAG_HMAC)
			digest_bits = 8 * (req_ctx->digcnt + blocksize);
		else
			digest_bits = 8 * req_ctx->digcnt;

		/* Add the hash pad */
		hash_pad_len = create_hash_pad(oper, req_ctx->pad_buffer,
					       req_ctx->digcnt, digest_bits);
		error = artpec6_crypto_setup_out_descr(common,
						       req_ctx->pad_buffer,
						       hash_pad_len, false,
						       true);
		req_ctx->digcnt = 0;

		if (error)
			return error;

		/* Descriptor for the final digest */
		error = artpec6_crypto_setup_in_descr(common, areq->result,
						      digestsize,
						      !needtrim);
		if (error)
			return error;

		if (needtrim) {
			/* Discard the extra context bytes for SHA-384 */
			error = artpec6_crypto_setup_in_descr(common,
					req_ctx->partial_buffer,
					digestsize - contextsize, true);
			if (error)
				return error;
		}

	} else { /* This is not the final operation for this request */
		if (!run_hw)
			return ARTPEC6_CRYPTO_PREPARE_HASH_NO_START;

		/* Save the intermediate hash state */
		error = artpec6_crypto_setup_in_descr(common,
					req_ctx->digeststate,
					contextsize, false);
		if (error)
			return error;
	}

	req_ctx->hash_flags &= ~(HASH_FLAG_INIT_CTX | HASH_FLAG_UPDATE |
				 HASH_FLAG_FINALIZE);

	error = artpec6_crypto_terminate_in_descrs(common);
	if (error)
		return error;

	error = artpec6_crypto_terminate_out_descrs(common);
	if (error)
		return error;

	error = artpec6_crypto_dma_map_descs(common);
	if (error)
		return error;

	return ARTPEC6_CRYPTO_PREPARE_HASH_START;
}

static int artpec6_crypto_aes_ecb_init(struct crypto_skcipher *tfm)
{
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_ECB;

	return 0;
}

static int artpec6_crypto_aes_ctr_init(struct crypto_skcipher *tfm)
{
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

	ctx->fallback = crypto_alloc_skcipher(crypto_tfm_alg_name(&tfm->base),
					      0,
					      CRYPTO_ALG_ASYNC |
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback))
		return PTR_ERR(ctx->fallback);

	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CTR;

	return 0;
}

static int artpec6_crypto_aes_cbc_init(struct crypto_skcipher *tfm)
{
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CBC;

	return 0;
}

static int artpec6_crypto_aes_xts_init(struct crypto_skcipher *tfm)
{
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_XTS;

	return 0;
}

static void artpec6_crypto_aes_exit(struct crypto_skcipher *tfm)
{
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

	memset(ctx, 0, sizeof(*ctx));
}

static void artpec6_crypto_aes_ctr_exit(struct crypto_skcipher *tfm)
{
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
	artpec6_crypto_aes_exit(tfm);
}

static int
artpec6_crypto_cipher_set_key(struct crypto_skcipher *cipher, const u8 *key,
			      unsigned int keylen)
{
	struct artpec6_cryptotfm_context *ctx =
		crypto_skcipher_ctx(cipher);

	switch (keylen) {
	case 16:
	case 24:
	case 32:
		break;
	default:
		crypto_skcipher_set_flags(cipher,
					  CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->aes_key, key, keylen);
	ctx->key_length = keylen;
	return 0;
}

static int
artpec6_crypto_xts_set_key(struct crypto_skcipher *cipher, const u8 *key,
			   unsigned int keylen)
{
	struct artpec6_cryptotfm_context *ctx =
		crypto_skcipher_ctx(cipher);
	int ret;

	ret = xts_check_key(&cipher->base, key, keylen);
	if (ret)
		return ret;

	switch (keylen) {
	case 32:
	case 48:
	case 64:
		break;
	default:
		crypto_skcipher_set_flags(cipher,
					  CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->aes_key, key, keylen);
	ctx->key_length = keylen;
	return 0;
}

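/** artpec6_crypto_prepare_crypto - Prepare an async block cipher request
 *
 * @areq: The asynchronous cipher request to process
 *
 * @return 0 if the DMA job was successfully prepared
 *	  <0 on error
 *
 * This function sets up the PDMA descriptor chain for a block cipher
 * request: the key metadata record and key, the cipher metadata record,
 * the IV when the mode needs one, and finally the source and destination
 * scatterlists. For AES-CTR and AES-XTS the payload is padded to a whole
 * number of AES blocks using a statically allocated pad buffer.
 */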
static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq)
{
	int ret;
	struct artpec6_crypto_walk walk;
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
	struct artpec6_crypto_request_context *req_ctx = NULL;
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;
	struct artpec6_crypto_req_common *common;
	bool cipher_decr = false;
	size_t cipher_klen;
	u32 cipher_len = 0;
	u32 oper;

	req_ctx = skcipher_request_ctx(areq);
	common = &req_ctx->common;

	artpec6_crypto_init_dma_operation(common);

	if (variant == ARTPEC6_CRYPTO)
		ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER, a6_regk_crypto_dlkey);
	else
		ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER, a7_regk_crypto_dlkey);

	ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md,
					     sizeof(ctx->key_md), false, false);
	if (ret)
		return ret;

	ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key,
					     ctx->key_length, true, false);
	if (ret)
		return ret;

	req_ctx->cipher_md = 0;

	if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS)
		cipher_klen = ctx->key_length/2;
	else
		cipher_klen = ctx->key_length;

	/* Metadata */
	switch (cipher_klen) {
	case 16:
		cipher_len = regk_crypto_key_128;
		break;
	case 24:
		cipher_len = regk_crypto_key_192;
		break;
	case 32:
		cipher_len = regk_crypto_key_256;
		break;
	default:
		pr_err("%s: Invalid key length %zu!\n",
		       MODULE_NAME, ctx->key_length);
		return -EINVAL;
	}

	switch (ctx->crypto_type) {
	case ARTPEC6_CRYPTO_CIPHER_AES_ECB:
		oper = regk_crypto_aes_ecb;
		cipher_decr = req_ctx->decrypt;
		break;

	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
		oper = regk_crypto_aes_cbc;
		cipher_decr = req_ctx->decrypt;
		break;

	case ARTPEC6_CRYPTO_CIPHER_AES_CTR:
		oper = regk_crypto_aes_ctr;
		cipher_decr = false;
		break;

	case ARTPEC6_CRYPTO_CIPHER_AES_XTS:
		oper = regk_crypto_aes_xts;
		cipher_decr = req_ctx->decrypt;

		if (variant == ARTPEC6_CRYPTO)
			req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DSEQ;
		else
			req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DSEQ;
		break;

	default:
		pr_err("%s: Invalid cipher mode %d!\n",
		       MODULE_NAME, ctx->crypto_type);
		return -EINVAL;
	}

	if (variant == ARTPEC6_CRYPTO) {
		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_OPER, oper);
		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_CIPHER_LEN,
						 cipher_len);
		if (cipher_decr)
			req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DECR;
	} else {
		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_OPER, oper);
		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_CIPHER_LEN,
						 cipher_len);
		if (cipher_decr)
			req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DECR;
	}

	ret = artpec6_crypto_setup_out_descr(common,
					     &req_ctx->cipher_md,
					     sizeof(req_ctx->cipher_md),
					     false, false);
	if (ret)
		return ret;

	ret = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
	if (ret)
		return ret;

	if (iv_len) {
		ret = artpec6_crypto_setup_out_descr(common, areq->iv, iv_len,
						     false, false);
		if (ret)
			return ret;
	}
	/* Data out */
	artpec6_crypto_walk_init(&walk, areq->src);
	ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, areq->cryptlen);
	if (ret)
		return ret;

	/* Data in */
	artpec6_crypto_walk_init(&walk, areq->dst);
	ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, areq->cryptlen);
	if (ret)
		return ret;

	/* CTR and XTS need the input padded to a whole number of AES blocks */
	if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_CTR ||
	    ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS) {
		size_t pad = ALIGN(areq->cryptlen, AES_BLOCK_SIZE) -
			     areq->cryptlen;

		if (pad) {
			ret = artpec6_crypto_setup_out_descr(common,
							     ac->pad_buffer,
							     pad, false, false);
			if (ret)
				return ret;

			ret = artpec6_crypto_setup_in_descr(common,
							    ac->pad_buffer, pad,
							    false);
			if (ret)
				return ret;
		}
	}

	ret = artpec6_crypto_terminate_out_descrs(common);
	if (ret)
		return ret;

	ret = artpec6_crypto_terminate_in_descrs(common);
	if (ret)
		return ret;

	return artpec6_crypto_dma_map_descs(common);
}

static int artpec6_crypto_prepare_aead(struct aead_request *areq)
{
	size_t count;
	int ret;
	size_t input_length;
	struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(areq->base.tfm);
	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq);
	struct crypto_aead *cipher = crypto_aead_reqtfm(areq);
	struct artpec6_crypto_req_common *common = &req_ctx->common;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;
	u32 md_cipher_len;

	artpec6_crypto_init_dma_operation(common);

	/* Key */
	if (variant == ARTPEC6_CRYPTO) {
		ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER,
					 a6_regk_crypto_dlkey);
	} else {
		ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER,
					 a7_regk_crypto_dlkey);
	}
	ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md,
					     sizeof(ctx->key_md), false, false);
	if (ret)
		return ret;

	ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key,
					     ctx->key_length, true, false);
	if (ret)
		return ret;

	req_ctx->cipher_md = 0;

	switch (ctx->key_length) {
	case 16:
		md_cipher_len = regk_crypto_key_128;
		break;
	case 24:
		md_cipher_len = regk_crypto_key_192;
		break;
	case 32:
		md_cipher_len = regk_crypto_key_256;
		break;
	default:
		return -EINVAL;
	}

	if (variant == ARTPEC6_CRYPTO) {
		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_OPER,
						 regk_crypto_aes_gcm);
		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_CIPHER_LEN,
						 md_cipher_len);
		if (req_ctx->decrypt)
			req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DECR;
	} else {
		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_OPER,
						 regk_crypto_aes_gcm);
		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_CIPHER_LEN,
						 md_cipher_len);
		if (req_ctx->decrypt)
			req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DECR;
	}

	ret = artpec6_crypto_setup_out_descr(common,
					     (void *) &req_ctx->cipher_md,
					     sizeof(req_ctx->cipher_md), false,
					     false);
	if (ret)
		return ret;

	ret = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
	if (ret)
		return ret;

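	/* For the decryption, cryptlen includes the authentication tag. */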
	input_length = areq->cryptlen;
	if (req_ctx->decrypt)
		input_length -= AES_BLOCK_SIZE;

	/* The AAD and text lengths in bits, as required by GCM */
	req_ctx->hw_ctx.aad_length_bits =
		__cpu_to_be64(8*areq->assoclen);

	req_ctx->hw_ctx.text_length_bits =
		__cpu_to_be64(8*input_length);

	memcpy(req_ctx->hw_ctx.J0, areq->iv, crypto_aead_ivsize(cipher));
	/* The HW omits the initial increment of the counter field */
	crypto_inc(req_ctx->hw_ctx.J0+12, 4);

	ret = artpec6_crypto_setup_out_descr(common, &req_ctx->hw_ctx,
		sizeof(struct artpec6_crypto_aead_hw_ctx), false, false);
	if (ret)
		return ret;

	{
		struct artpec6_crypto_walk walk;

		artpec6_crypto_walk_init(&walk, areq->src);

		/* Associated data */
		count = areq->assoclen;
		ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, count);
		if (ret)
			return ret;

		if (!IS_ALIGNED(areq->assoclen, 16)) {
			size_t assoc_pad = 16 - (areq->assoclen % 16);
			/* The HW mandates zero padding here */
			ret = artpec6_crypto_setup_out_descr(common,
							     ac->zero_buffer,
							     assoc_pad, false,
							     false);
			if (ret)
				return ret;
		}

		/* Data to crypto */
		count = input_length;
		ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, count);
		if (ret)
			return ret;

		if (!IS_ALIGNED(input_length, 16)) {
			size_t crypto_pad = 16 - (input_length % 16);
			/* The HW mandates zero padding here */
			ret = artpec6_crypto_setup_out_descr(common,
							     ac->zero_buffer,
							     crypto_pad,
							     false,
							     false);
			if (ret)
				return ret;
		}
	}

	/* Data from crypto */
	{
		struct artpec6_crypto_walk walk;
		size_t output_len = areq->cryptlen;

		if (req_ctx->decrypt)
			output_len -= AES_BLOCK_SIZE;

		artpec6_crypto_walk_init(&walk, areq->dst);

		/* Skip the associated data in the output */
		count = artpec6_crypto_walk_advance(&walk, areq->assoclen);
		if (count)
			return -EINVAL;

		count = output_len;
		ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, count);
		if (ret)
			return ret;

		/* Put padding between the cryptotext and the auth tag */
		if (!IS_ALIGNED(output_len, 16)) {
			size_t crypto_pad = 16 - (output_len % 16);

			ret = artpec6_crypto_setup_in_descr(common,
							    ac->pad_buffer,
							    crypto_pad, false);
			if (ret)
				return ret;
		}

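		/* The authentication tag shall follow directly after the
		 * output ciphertext. For decryption it is instead read into
		 * a context buffer for later comparison with the input tag.
		 */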
		count = AES_BLOCK_SIZE;

		if (req_ctx->decrypt) {
			ret = artpec6_crypto_setup_in_descr(common,
				req_ctx->decryption_tag, count, false);
			if (ret)
				return ret;

		} else {
			ret = artpec6_crypto_setup_sg_descrs_in(common, &walk,
								count);
			if (ret)
				return ret;
		}
	}

	ret = artpec6_crypto_terminate_in_descrs(common);
	if (ret)
		return ret;

	ret = artpec6_crypto_terminate_out_descrs(common);
	if (ret)
		return ret;

	return artpec6_crypto_dma_map_descs(common);
}

static void artpec6_crypto_process_queue(struct artpec6_crypto *ac)
{
	struct artpec6_crypto_req_common *req;

	while (!list_empty(&ac->queue) && !artpec6_crypto_busy()) {
		req = list_first_entry(&ac->queue,
				       struct artpec6_crypto_req_common,
				       list);
		list_move_tail(&req->list, &ac->pending);
		artpec6_crypto_start_dma(req);

		req->req->complete(req->req, -EINPROGRESS);
	}

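	/* In some cases, the hardware can raise an in_eop_flush interrupt
	 * before actually updating the status, so we have a timer which
	 * rechecks the status on timeout. Since such cases are expected to
	 * be very rare, we use a relatively large timeout value. There
	 * should be no noticeable negative effect if we timeout spuriously.
	 */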
	if (ac->pending_count)
		mod_timer(&ac->timer, jiffies + msecs_to_jiffies(100));
	else
		del_timer(&ac->timer);
}

static void artpec6_crypto_timeout(unsigned long data)
{
	struct artpec6_crypto *ac = (struct artpec6_crypto *) data;

	dev_info_ratelimited(artpec6_crypto_dev, "timeout\n");

	tasklet_schedule(&ac->task);
}

static void artpec6_crypto_task(unsigned long data)
{
	struct artpec6_crypto *ac = (struct artpec6_crypto *)data;
	struct artpec6_crypto_req_common *req;
	struct artpec6_crypto_req_common *n;

	if (list_empty(&ac->pending)) {
		pr_debug("Spurious IRQ\n");
		return;
	}

	spin_lock_bh(&ac->queue_lock);

	list_for_each_entry_safe(req, n, &ac->pending, list) {
		struct artpec6_crypto_dma_descriptors *dma = req->dma;
		u32 stat;

		dma_sync_single_for_cpu(artpec6_crypto_dev, dma->stat_dma_addr,
					sizeof(dma->stat[0]),
					DMA_BIDIRECTIONAL);

		stat = req->dma->stat[req->dma->in_cnt-1];

		/* A non-zero final status descriptor indicates
		 * this job has finished.
		 */
		pr_debug("Request %p status is %X\n", req, stat);
		if (!stat)
			break;

#ifdef CONFIG_FAULT_INJECTION
		if (should_fail(&artpec6_crypto_fail_status_read, 1))
			continue;
#endif

		pr_debug("Completing request %p\n", req);

		list_del(&req->list);

		artpec6_crypto_dma_unmap_all(req);
		artpec6_crypto_copy_bounce_buffers(req);

		ac->pending_count--;
		artpec6_crypto_common_destroy(req);
		req->complete(req->req);
	}

	artpec6_crypto_process_queue(ac);

	spin_unlock_bh(&ac->queue_lock);
}

static void artpec6_crypto_complete_crypto(struct crypto_async_request *req)
{
	req->complete(req, 0);
}

static void
artpec6_crypto_complete_cbc_decrypt(struct crypto_async_request *req)
{
	struct skcipher_request *cipher_req = container_of(req,
		struct skcipher_request, base);

	scatterwalk_map_and_copy(cipher_req->iv, cipher_req->src,
				 cipher_req->cryptlen - AES_BLOCK_SIZE,
				 AES_BLOCK_SIZE, 0);
	req->complete(req, 0);
}

static void
artpec6_crypto_complete_cbc_encrypt(struct crypto_async_request *req)
{
	struct skcipher_request *cipher_req = container_of(req,
		struct skcipher_request, base);

	scatterwalk_map_and_copy(cipher_req->iv, cipher_req->dst,
				 cipher_req->cryptlen - AES_BLOCK_SIZE,
				 AES_BLOCK_SIZE, 0);
	req->complete(req, 0);
}

static void artpec6_crypto_complete_aead(struct crypto_async_request *req)
{
	int result = 0;

	/* Verify the GCM hashtag */
	struct aead_request *areq = container_of(req,
		struct aead_request, base);
	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq);

	if (req_ctx->decrypt) {
		u8 input_tag[AES_BLOCK_SIZE];

		sg_pcopy_to_buffer(areq->src,
				   sg_nents(areq->src),
				   input_tag,
				   AES_BLOCK_SIZE,
				   areq->assoclen + areq->cryptlen -
				   AES_BLOCK_SIZE);

		if (memcmp(req_ctx->decryption_tag,
			   input_tag,
			   AES_BLOCK_SIZE)) {
			pr_debug("***EBADMSG:\n");
			print_hex_dump_debug("ref:", DUMP_PREFIX_ADDRESS, 32, 1,
					     input_tag, AES_BLOCK_SIZE, true);
			print_hex_dump_debug("out:", DUMP_PREFIX_ADDRESS, 32, 1,
					     req_ctx->decryption_tag,
					     AES_BLOCK_SIZE, true);

			result = -EBADMSG;
		}
	}

	req->complete(req, result);
}

static void artpec6_crypto_complete_hash(struct crypto_async_request *req)
{
	req->complete(req, 0);
}

/*
 * Hash functions.
 */
static int
artpec6_crypto_hash_set_key(struct crypto_ahash *tfm,
			    const u8 *key, unsigned int keylen)
{
	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(&tfm->base);
	size_t blocksize;
	int ret;

	if (!keylen) {
		pr_err("Invalid length (%u) of HMAC key\n",
		       keylen);
		return -EINVAL;
	}

	memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key));

	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	if (keylen > blocksize) {
		SHASH_DESC_ON_STACK(hdesc, tfm_ctx->child_hash);

		hdesc->tfm = tfm_ctx->child_hash;
		hdesc->flags = crypto_ahash_get_flags(tfm) &
			       CRYPTO_TFM_REQ_MAY_SLEEP;

		tfm_ctx->hmac_key_length = blocksize;
		ret = crypto_shash_digest(hdesc, key, keylen,
					  tfm_ctx->hmac_key);
		if (ret)
			return ret;

	} else {
		memcpy(tfm_ctx->hmac_key, key, keylen);
		tfm_ctx->hmac_key_length = keylen;
	}

	return 0;
}

static int
artpec6_crypto_init_hash(struct ahash_request *req, u8 type, int hmac)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
	u32 oper;

	memset(req_ctx, 0, sizeof(*req_ctx));

	req_ctx->hash_flags = HASH_FLAG_INIT_CTX;
	if (hmac)
		req_ctx->hash_flags |= (HASH_FLAG_HMAC | HASH_FLAG_UPDATE_KEY);

	switch (type) {
	case ARTPEC6_CRYPTO_HASH_SHA1:
		oper = hmac ? regk_crypto_hmac_sha1 : regk_crypto_sha1;
		break;
	case ARTPEC6_CRYPTO_HASH_SHA256:
		oper = hmac ? regk_crypto_hmac_sha256 : regk_crypto_sha256;
		break;
	case ARTPEC6_CRYPTO_HASH_SHA384:
		oper = hmac ? regk_crypto_hmac_sha384 : regk_crypto_sha384;
		break;
	case ARTPEC6_CRYPTO_HASH_SHA512:
		oper = hmac ? regk_crypto_hmac_sha512 : regk_crypto_sha512;
		break;
	default:
		pr_err("Unsupported hash type 0x%x\n", type);
		return -EINVAL;
	}

	if (variant == ARTPEC6_CRYPTO)
		req_ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, oper);
	else
		req_ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, oper);

	return 0;
}

static int artpec6_crypto_prepare_submit_hash(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
	int ret;

	if (!req_ctx->common.dma) {
		ret = artpec6_crypto_common_init(&req_ctx->common,
						 &req->base,
						 artpec6_crypto_complete_hash,
						 NULL, 0);

		if (ret)
			return ret;
	}

	ret = artpec6_crypto_prepare_hash(req);
	switch (ret) {
	case ARTPEC6_CRYPTO_PREPARE_HASH_START:
		ret = artpec6_crypto_submit(&req_ctx->common);
		break;

	case ARTPEC6_CRYPTO_PREPARE_HASH_NO_START:
		ret = 0;
		/* Fall through: nothing was submitted, release the DMA state */

	default:
		artpec6_crypto_common_destroy(&req_ctx->common);
		break;
	}

	return ret;
}

static int artpec6_crypto_hash_final(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	req_ctx->hash_flags |= HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_hash_update(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_sha1_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA1, 0);
}

static int artpec6_crypto_sha1_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA1, 0);

	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_sha256_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 0);
}

static int artpec6_crypto_sha256_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 0);
	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int __maybe_unused artpec6_crypto_sha384_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 0);
}

static int __maybe_unused
artpec6_crypto_sha384_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 0);
	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_sha512_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 0);
}

static int artpec6_crypto_sha512_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 0);
	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_hmac_sha256_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 1);
}

static int __maybe_unused
artpec6_crypto_hmac_sha384_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 1);
}

static int artpec6_crypto_hmac_sha512_init(struct ahash_request *req)
{
	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 1);
}

static int artpec6_crypto_hmac_sha256_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 1);
	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int __maybe_unused
artpec6_crypto_hmac_sha384_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA384, 1);
	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_hmac_sha512_digest(struct ahash_request *req)
{
	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);

	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA512, 1);
	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;

	return artpec6_crypto_prepare_submit_hash(req);
}

static int artpec6_crypto_ahash_init_common(struct crypto_tfm *tfm,
		const char *base_hash_name)
{
	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(tfm);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct artpec6_hash_request_context));
	memset(tfm_ctx, 0, sizeof(*tfm_ctx));

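	/*
	 * HMAC transforms keep a software shash of the underlying digest;
	 * artpec6_crypto_hash_set_key() uses it to pre-hash keys longer
	 * than the block size.
	 */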
	if (base_hash_name) {
		struct crypto_shash *child;

		child = crypto_alloc_shash(base_hash_name, 0,
					   CRYPTO_ALG_NEED_FALLBACK);

		if (IS_ERR(child))
			return PTR_ERR(child);

		tfm_ctx->child_hash = child;
	}

	return 0;
}

static int artpec6_crypto_ahash_init(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, NULL);
}

static int artpec6_crypto_ahash_init_hmac_sha256(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, "sha256");
}

static int __maybe_unused
artpec6_crypto_ahash_init_hmac_sha384(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, "sha384");
}

static int artpec6_crypto_ahash_init_hmac_sha512(struct crypto_tfm *tfm)
{
	return artpec6_crypto_ahash_init_common(tfm, "sha512");
}

static void artpec6_crypto_ahash_exit(struct crypto_tfm *tfm)
{
	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(tfm);

	if (tfm_ctx->child_hash)
		crypto_free_shash(tfm_ctx->child_hash);

	memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key));
	tfm_ctx->hmac_key_length = 0;
}

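/*
 * The export state stores the operation code rather than the raw hash_md
 * word, since the md register layout differs between ARTPEC-6 and
 * ARTPEC-7; import re-encodes it for the active variant.
 */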
static int artpec6_crypto_hash_export(struct ahash_request *req, void *out)
{
	const struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
	struct artpec6_hash_export_state *state = out;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;

	BUILD_BUG_ON(sizeof(state->partial_buffer) !=
		     sizeof(ctx->partial_buffer));
	BUILD_BUG_ON(sizeof(state->digeststate) != sizeof(ctx->digeststate));

	state->digcnt = ctx->digcnt;
	state->partial_bytes = ctx->partial_bytes;
	state->hash_flags = ctx->hash_flags;

	if (variant == ARTPEC6_CRYPTO)
		state->oper = FIELD_GET(A6_CRY_MD_OPER, ctx->hash_md);
	else
		state->oper = FIELD_GET(A7_CRY_MD_OPER, ctx->hash_md);

	memcpy(state->partial_buffer, ctx->partial_buffer,
	       sizeof(state->partial_buffer));
	memcpy(state->digeststate, ctx->digeststate,
	       sizeof(state->digeststate));

	return 0;
}

static int artpec6_crypto_hash_import(struct ahash_request *req, const void *in)
{
	struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
	const struct artpec6_hash_export_state *state = in;
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;

	memset(ctx, 0, sizeof(*ctx));

	ctx->digcnt = state->digcnt;
	ctx->partial_bytes = state->partial_bytes;
	ctx->hash_flags = state->hash_flags;

	if (variant == ARTPEC6_CRYPTO)
		ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, state->oper);
	else
		ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, state->oper);

	memcpy(ctx->partial_buffer, state->partial_buffer,
	       sizeof(state->partial_buffer));
	memcpy(ctx->digeststate, state->digeststate,
	       sizeof(state->digeststate));

	return 0;
}

static int init_crypto_hw(struct artpec6_crypto *ac)
{
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;
	u32 out_descr_buf_size;
	u32 out_data_buf_size;
	u32 in_data_buf_size;
	u32 in_descr_buf_size;
	u32 in_stat_buf_size;
	u32 in, out;

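	/*
	 * Buffer sizes are programmed in units of 64 bytes. The build-time
	 * checks below bound the OUT channel allocations to 1984 bytes and
	 * the IN channel allocations to 1024 bytes of internal PDMA memory.
	 */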
	out_data_buf_size = 16;
	out_descr_buf_size = 15;
	in_data_buf_size = 8;
	in_descr_buf_size = 4;
	in_stat_buf_size = 4;

	BUILD_BUG_ON_MSG((out_data_buf_size
			  + out_descr_buf_size) * 64 > 1984,
			 "Invalid OUT configuration");

	BUILD_BUG_ON_MSG((in_data_buf_size
			  + in_descr_buf_size
			  + in_stat_buf_size) * 64 > 1024,
			 "Invalid IN configuration");

	in = FIELD_PREP(PDMA_IN_BUF_CFG_DATA_BUF_SIZE, in_data_buf_size) |
	     FIELD_PREP(PDMA_IN_BUF_CFG_DESCR_BUF_SIZE, in_descr_buf_size) |
	     FIELD_PREP(PDMA_IN_BUF_CFG_STAT_BUF_SIZE, in_stat_buf_size);

	out = FIELD_PREP(PDMA_OUT_BUF_CFG_DATA_BUF_SIZE, out_data_buf_size) |
	      FIELD_PREP(PDMA_OUT_BUF_CFG_DESCR_BUF_SIZE, out_descr_buf_size);

	writel_relaxed(out, base + PDMA_OUT_BUF_CFG);
	writel_relaxed(PDMA_OUT_CFG_EN, base + PDMA_OUT_CFG);

	if (variant == ARTPEC6_CRYPTO) {
		writel_relaxed(in, base + A6_PDMA_IN_BUF_CFG);
		writel_relaxed(PDMA_IN_CFG_EN, base + A6_PDMA_IN_CFG);
		writel_relaxed(A6_PDMA_INTR_MASK_IN_DATA |
			       A6_PDMA_INTR_MASK_IN_EOP_FLUSH,
			       base + A6_PDMA_INTR_MASK);
	} else {
		writel_relaxed(in, base + A7_PDMA_IN_BUF_CFG);
		writel_relaxed(PDMA_IN_CFG_EN, base + A7_PDMA_IN_CFG);
		writel_relaxed(A7_PDMA_INTR_MASK_IN_DATA |
			       A7_PDMA_INTR_MASK_IN_EOP_FLUSH,
			       base + A7_PDMA_INTR_MASK);
	}

	return 0;
}

static void artpec6_crypto_disable_hw(struct artpec6_crypto *ac)
{
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;

	if (variant == ARTPEC6_CRYPTO) {
		writel_relaxed(A6_PDMA_IN_CMD_STOP, base + A6_PDMA_IN_CMD);
		writel_relaxed(0, base + A6_PDMA_IN_CFG);
		writel_relaxed(A6_PDMA_OUT_CMD_STOP, base + PDMA_OUT_CMD);
	} else {
		writel_relaxed(A7_PDMA_IN_CMD_STOP, base + A7_PDMA_IN_CMD);
		writel_relaxed(0, base + A7_PDMA_IN_CFG);
		writel_relaxed(A7_PDMA_OUT_CMD_STOP, base + PDMA_OUT_CMD);
	}

	writel_relaxed(0, base + PDMA_OUT_CFG);
}

static irqreturn_t artpec6_crypto_irq(int irq, void *dev_id)
{
	struct artpec6_crypto *ac = dev_id;
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;
	u32 mask_in_data, mask_in_eop_flush;
	u32 in_cmd_flush_stat, in_cmd_reg;
	u32 ack_intr_reg;
	u32 ack = 0;
	u32 intr;

	if (variant == ARTPEC6_CRYPTO) {
		intr = readl_relaxed(base + A6_PDMA_MASKED_INTR);
		mask_in_data = A6_PDMA_INTR_MASK_IN_DATA;
		mask_in_eop_flush = A6_PDMA_INTR_MASK_IN_EOP_FLUSH;
		in_cmd_flush_stat = A6_PDMA_IN_CMD_FLUSH_STAT;
		in_cmd_reg = A6_PDMA_IN_CMD;
		ack_intr_reg = A6_PDMA_ACK_INTR;
	} else {
		intr = readl_relaxed(base + A7_PDMA_MASKED_INTR);
		mask_in_data = A7_PDMA_INTR_MASK_IN_DATA;
		mask_in_eop_flush = A7_PDMA_INTR_MASK_IN_EOP_FLUSH;
		in_cmd_flush_stat = A7_PDMA_IN_CMD_FLUSH_STAT;
		in_cmd_reg = A7_PDMA_IN_CMD;
		ack_intr_reg = A7_PDMA_ACK_INTR;
	}

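	/*
	 * IN_DATA fires when job data has been written to memory. If no
	 * EOP flush is signalled, request a status flush so the per-job
	 * status reaches the status vector; the completion tasklet is
	 * only scheduled once IN_EOP_FLUSH is raised.
	 */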
	if (intr & mask_in_data)
		ack |= mask_in_data;

	if (intr & mask_in_eop_flush)
		ack |= mask_in_eop_flush;
	else
		writel_relaxed(in_cmd_flush_stat, base + in_cmd_reg);

	writel_relaxed(ack, base + ack_intr_reg);

	if (intr & mask_in_eop_flush)
		tasklet_schedule(&ac->task);

	return IRQ_HANDLED;
}

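/* Hashes available on both ARTPEC-6 and ARTPEC-7 */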
static struct ahash_alg hash_algos[] = {
	{
		.init = artpec6_crypto_sha1_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha1_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA1_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha1",
			.cra_driver_name = "artpec-sha1",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA1_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},

	{
		.init = artpec6_crypto_sha256_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha256_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha256",
			.cra_driver_name = "artpec-sha256",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},

	{
		.init = artpec6_crypto_hmac_sha256_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_hmac_sha256_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.setkey = artpec6_crypto_hash_set_key,
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "hmac(sha256)",
			.cra_driver_name = "artpec-hmac-sha256",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init_hmac_sha256,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
};

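/* SHA-384/512 and their HMACs; registered only on the ARTPEC-7 variant */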
static struct ahash_alg artpec7_hash_algos[] = {
	{
		.init = artpec6_crypto_sha384_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha384_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA384_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha384",
			.cra_driver_name = "artpec-sha384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},

	{
		.init = artpec6_crypto_hmac_sha384_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_hmac_sha384_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.setkey = artpec6_crypto_hash_set_key,
		.halg.digestsize = SHA384_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "hmac(sha384)",
			.cra_driver_name = "artpec-hmac-sha384",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA384_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init_hmac_sha384,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},

	{
		.init = artpec6_crypto_sha512_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_sha512_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.halg.digestsize = SHA512_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "sha512",
			.cra_driver_name = "artpec-sha512",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA512_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},

	{
		.init = artpec6_crypto_hmac_sha512_init,
		.update = artpec6_crypto_hash_update,
		.final = artpec6_crypto_hash_final,
		.digest = artpec6_crypto_hmac_sha512_digest,
		.import = artpec6_crypto_hash_import,
		.export = artpec6_crypto_hash_export,
		.setkey = artpec6_crypto_hash_set_key,
		.halg.digestsize = SHA512_DIGEST_SIZE,
		.halg.statesize = sizeof(struct artpec6_hash_export_state),
		.halg.base = {
			.cra_name = "hmac(sha512)",
			.cra_driver_name = "artpec-hmac-sha512",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_ASYNC,
			.cra_blocksize = SHA512_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
			.cra_init = artpec6_crypto_ahash_init_hmac_sha512,
			.cra_exit = artpec6_crypto_ahash_exit,
		}
	},
};

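/* AES skciphers: ECB, CTR, CBC and XTS modes */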
static struct skcipher_alg crypto_algos[] = {
	{
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "artpec6-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = artpec6_crypto_cipher_set_key,
		.encrypt = artpec6_crypto_encrypt,
		.decrypt = artpec6_crypto_decrypt,
		.init = artpec6_crypto_aes_ecb_init,
		.exit = artpec6_crypto_aes_exit,
	},

	{
		.base = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "artpec6-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = artpec6_crypto_cipher_set_key,
		.encrypt = artpec6_crypto_ctr_encrypt,
		.decrypt = artpec6_crypto_ctr_decrypt,
		.init = artpec6_crypto_aes_ctr_init,
		.exit = artpec6_crypto_aes_ctr_exit,
	},

	{
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "artpec6-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = artpec6_crypto_cipher_set_key,
		.encrypt = artpec6_crypto_encrypt,
		.decrypt = artpec6_crypto_decrypt,
		.init = artpec6_crypto_aes_cbc_init,
		.exit = artpec6_crypto_aes_exit,
	},

	{
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "artpec6-xts-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
		.min_keysize = 2 * AES_MIN_KEY_SIZE,
		.max_keysize = 2 * AES_MAX_KEY_SIZE,
		.ivsize = 16,
		.setkey = artpec6_crypto_xts_set_key,
		.encrypt = artpec6_crypto_encrypt,
		.decrypt = artpec6_crypto_decrypt,
		.init = artpec6_crypto_aes_xts_init,
		.exit = artpec6_crypto_aes_exit,
	},
};

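/* AEAD: AES-GCM */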
static struct aead_alg aead_algos[] = {
	{
		.init = artpec6_crypto_aead_init,
		.setkey = artpec6_crypto_aead_set_key,
		.encrypt = artpec6_crypto_aead_encrypt,
		.decrypt = artpec6_crypto_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,

		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "artpec-gcm-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
			.cra_alignmask = 3,
			.cra_module = THIS_MODULE,
		},
	}
};

#ifdef CONFIG_DEBUG_FS

struct dbgfs_u32 {
	char *name;
	mode_t mode;
	u32 *flag;
	char *desc;
};

static struct dentry *dbgfs_root;

static void artpec6_crypto_init_debugfs(void)
{
	dbgfs_root = debugfs_create_dir("artpec6_crypto", NULL);

	if (!dbgfs_root || IS_ERR(dbgfs_root)) {
		dbgfs_root = NULL;
		pr_err("Could not initialise debugfs!\n");
		return;
	}

#ifdef CONFIG_FAULT_INJECTION
	fault_create_debugfs_attr("fail_status_read", dbgfs_root,
				  &artpec6_crypto_fail_status_read);

	fault_create_debugfs_attr("fail_dma_array_full", dbgfs_root,
				  &artpec6_crypto_fail_dma_array_full);
#endif
}

static void artpec6_crypto_free_debugfs(void)
{
	if (!dbgfs_root)
		return;

	debugfs_remove_recursive(dbgfs_root);
	dbgfs_root = NULL;
}
#endif

static const struct of_device_id artpec6_crypto_of_match[] = {
	{ .compatible = "axis,artpec6-crypto", .data = (void *)ARTPEC6_CRYPTO },
	{ .compatible = "axis,artpec7-crypto", .data = (void *)ARTPEC7_CRYPTO },
	{}
};
MODULE_DEVICE_TABLE(of, artpec6_crypto_of_match);

static int artpec6_crypto_probe(struct platform_device *pdev)
{
	const struct of_device_id *match;
	enum artpec6_crypto_variant variant;
	struct artpec6_crypto *ac;
	struct device *dev = &pdev->dev;
	void __iomem *base;
	struct resource *res;
	int irq;
	int err;

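	/* Only a single instance of the accelerator is supported. */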
	if (artpec6_crypto_dev)
		return -ENODEV;

	match = of_match_node(artpec6_crypto_of_match, dev->of_node);
	if (!match)
		return -EINVAL;

	variant = (enum artpec6_crypto_variant)match->data;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res)
		return -ENODEV;

	base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(base))
		return PTR_ERR(base);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return -ENODEV;

	ac = devm_kzalloc(&pdev->dev, sizeof(struct artpec6_crypto),
			  GFP_KERNEL);
	if (!ac)
		return -ENOMEM;

	platform_set_drvdata(pdev, ac);
	ac->variant = variant;

	spin_lock_init(&ac->queue_lock);
	INIT_LIST_HEAD(&ac->queue);
	INIT_LIST_HEAD(&ac->pending);
	setup_timer(&ac->timer, artpec6_crypto_timeout, (unsigned long)ac);

	ac->base = base;

	ac->dma_cache = kmem_cache_create("artpec6_crypto_dma",
		sizeof(struct artpec6_crypto_dma_descriptors),
		64,
		0,
		NULL);
	if (!ac->dma_cache)
		return -ENOMEM;

#ifdef CONFIG_DEBUG_FS
	artpec6_crypto_init_debugfs();
#endif

	tasklet_init(&ac->task, artpec6_crypto_task,
		     (unsigned long)ac);

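	/*
	 * The pad and zero buffers are over-allocated by one cache line so
	 * they can be realigned to an ARTPEC_CACHE_LINE_MAX boundary.
	 */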
	ac->pad_buffer = devm_kzalloc(&pdev->dev, 2 * ARTPEC_CACHE_LINE_MAX,
				      GFP_KERNEL);
	if (!ac->pad_buffer)
		return -ENOMEM;
	ac->pad_buffer = PTR_ALIGN(ac->pad_buffer, ARTPEC_CACHE_LINE_MAX);

	ac->zero_buffer = devm_kzalloc(&pdev->dev, 2 * ARTPEC_CACHE_LINE_MAX,
				       GFP_KERNEL);
	if (!ac->zero_buffer)
		return -ENOMEM;
	ac->zero_buffer = PTR_ALIGN(ac->zero_buffer, ARTPEC_CACHE_LINE_MAX);

	err = init_crypto_hw(ac);
	if (err)
		goto free_cache;

	err = devm_request_irq(&pdev->dev, irq, artpec6_crypto_irq, 0,
			       "artpec6-crypto", ac);
	if (err)
		goto disable_hw;

	artpec6_crypto_dev = &pdev->dev;

	err = crypto_register_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
	if (err) {
		dev_err(dev, "Failed to register ahashes\n");
		goto disable_hw;
	}

	if (variant != ARTPEC6_CRYPTO) {
		err = crypto_register_ahashes(artpec7_hash_algos,
					      ARRAY_SIZE(artpec7_hash_algos));
		if (err) {
			dev_err(dev, "Failed to register ahashes\n");
			goto unregister_ahashes;
		}
	}

	err = crypto_register_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
	if (err) {
		dev_err(dev, "Failed to register ciphers\n");
		goto unregister_a7_ahashes;
	}

	err = crypto_register_aeads(aead_algos, ARRAY_SIZE(aead_algos));
	if (err) {
		dev_err(dev, "Failed to register aeads\n");
		goto unregister_algs;
	}

	return 0;

unregister_algs:
	crypto_unregister_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
unregister_a7_ahashes:
	if (variant != ARTPEC6_CRYPTO)
		crypto_unregister_ahashes(artpec7_hash_algos,
					  ARRAY_SIZE(artpec7_hash_algos));
unregister_ahashes:
	crypto_unregister_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
disable_hw:
	artpec6_crypto_disable_hw(ac);
free_cache:
	kmem_cache_destroy(ac->dma_cache);
	return err;
}

static int artpec6_crypto_remove(struct platform_device *pdev)
{
	struct artpec6_crypto *ac = platform_get_drvdata(pdev);
	int irq = platform_get_irq(pdev, 0);

	crypto_unregister_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
	if (ac->variant != ARTPEC6_CRYPTO)
		crypto_unregister_ahashes(artpec7_hash_algos,
					  ARRAY_SIZE(artpec7_hash_algos));
	crypto_unregister_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
	crypto_unregister_aeads(aead_algos, ARRAY_SIZE(aead_algos));

	tasklet_disable(&ac->task);
	devm_free_irq(&pdev->dev, irq, ac);
	tasklet_kill(&ac->task);
	del_timer_sync(&ac->timer);

	artpec6_crypto_disable_hw(ac);

	kmem_cache_destroy(ac->dma_cache);
#ifdef CONFIG_DEBUG_FS
	artpec6_crypto_free_debugfs();
#endif
	return 0;
}

static struct platform_driver artpec6_crypto_driver = {
	.probe   = artpec6_crypto_probe,
	.remove  = artpec6_crypto_remove,
	.driver  = {
		.name  = "artpec6-crypto",
		.owner = THIS_MODULE,
		.of_match_table = artpec6_crypto_of_match,
	},
};

module_platform_driver(artpec6_crypto_driver);

MODULE_AUTHOR("Axis Communications AB");
MODULE_DESCRIPTION("ARTPEC-6 Crypto driver");
MODULE_LICENSE("GPL");