1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28#include <linux/kernel.h>
29#include <linux/module.h>
30#include <linux/mod_devicetable.h>
31#include <linux/device.h>
32#include <linux/interrupt.h>
33#include <linux/crypto.h>
34#include <linux/hw_random.h>
35#include <linux/of_address.h>
36#include <linux/of_irq.h>
37#include <linux/of_platform.h>
38#include <linux/dma-mapping.h>
39#include <linux/io.h>
40#include <linux/spinlock.h>
41#include <linux/rtnetlink.h>
42#include <linux/slab.h>
43
44#include <crypto/algapi.h>
45#include <crypto/aes.h>
46#include <crypto/des.h>
47#include <crypto/sha.h>
48#include <crypto/md5.h>
49#include <crypto/internal/aead.h>
50#include <crypto/authenc.h>
51#include <crypto/skcipher.h>
52#include <crypto/hash.h>
53#include <crypto/internal/hash.h>
54#include <crypto/scatterwalk.h>
55
56#include "talitos.h"
57
58static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
59 unsigned int len, bool is_sec1)
60{
61 ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
62 if (is_sec1) {
63 ptr->len1 = cpu_to_be16(len);
64 } else {
65 ptr->len = cpu_to_be16(len);
66 ptr->eptr = upper_32_bits(dma_addr);
67 }
68}
69
70static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
71 struct talitos_ptr *src_ptr, bool is_sec1)
72{
73 dst_ptr->ptr = src_ptr->ptr;
74 if (is_sec1) {
75 dst_ptr->len1 = src_ptr->len1;
76 } else {
77 dst_ptr->len = src_ptr->len;
78 dst_ptr->eptr = src_ptr->eptr;
79 }
80}
81
82static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
83 bool is_sec1)
84{
85 if (is_sec1)
86 return be16_to_cpu(ptr->len1);
87 else
88 return be16_to_cpu(ptr->len);
89}
90
91static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
92 bool is_sec1)
93{
94 if (!is_sec1)
95 ptr->j_extent = val;
96}
97
98static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
99{
100 if (!is_sec1)
101 ptr->j_extent |= val;
102}
103
104
105
106
107static void __map_single_talitos_ptr(struct device *dev,
108 struct talitos_ptr *ptr,
109 unsigned int len, void *data,
110 enum dma_data_direction dir,
111 unsigned long attrs)
112{
113 dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
114 struct talitos_private *priv = dev_get_drvdata(dev);
115 bool is_sec1 = has_ftr_sec1(priv);
116
117 to_talitos_ptr(ptr, dma_addr, len, is_sec1);
118}
119
/* DMA-map a buffer into a talitos pointer with default (syncing) attrs. */
static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}
127
/*
 * Like map_single_talitos_ptr() but skips the CPU cache sync at map time
 * (DMA_ATTR_SKIP_CPU_SYNC); the caller is responsible for any needed sync.
 */
static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}
136
137
138
139
140static void unmap_single_talitos_ptr(struct device *dev,
141 struct talitos_ptr *ptr,
142 enum dma_data_direction dir)
143{
144 struct talitos_private *priv = dev_get_drvdata(dev);
145 bool is_sec1 = has_ftr_sec1(priv);
146
147 dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
148 from_talitos_ptr_len(ptr, is_sec1), dir);
149}
150
/*
 * Reset one channel and restore its default configuration.
 *
 * SEC1 exposes the channel reset bit in CCCR_LO, SEC2+ in CCCR; in both
 * cases the bit is set and then busy-polled until the hardware clears it.
 *
 * Returns 0 on success, -EIO if the reset bit never self-clears.
 */
static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* enable extended addressing, done writeback and done IRQ */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);

	/* SEC1 additionally needs the NE bit set here */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICV writeback, when the hardware can check auth tags itself */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}
193
/*
 * Issue a software reset of the whole SEC block and wait for it to
 * complete.  When a second IRQ line is in use, also set the RCA bits to
 * reassign channels 1 and 3 to it (presumably the secondary interrupt —
 * NOTE(review): confirm against the SEC reference manual).
 *
 * Returns 0 on success, -EIO if the SWR bit never self-clears.
 */
static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}
219
220
221
222
/*
 * Bring the whole device to a known-good state: master reset, per-channel
 * reset, then unmask/configure interrupts.
 *
 * The device is reset twice on purpose — presumably per hardware errata
 * (the reset sequence is recommended to be performed twice); do not
 * "deduplicate" the two reset_device() calls without checking the errata.
 *
 * Returns 0 on success or the first error from reset_device()/
 * reset_channel().
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/* master reset, twice (see note above) */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset and initialize each channel */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/*
	 * Enable error/done interrupts.  Note the polarity difference:
	 * SEC1 unmasks by clearing IMR bits, SEC2+ by setting them.
	 */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* SEC1: enable DEU key parity error interrupt */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}
268
269
270
271
272
273
274
275
276
277
278
279
280
/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:	the SEC device to be used
 * @ch:		the SEC device channel to be used
 * @desc:	the descriptor to be processed by the device
 * @callback:	callback invoked upon completion of this request
 * @context:	caller context passed back to @callback
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * The callback must check the error argument and the feedback field in
 * the descriptor header for device processing status.
 *
 * Returns -EINPROGRESS when the descriptor was queued to the hardware,
 * or -EAGAIN when the channel fifo is full.
 */
int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
		   void (*callback)(struct device *dev,
				    struct talitos_desc *desc,
				    void *context, int error),
		   void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor; SEC1 descriptors start at the hdr1 field */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head (fifo_len is a power of two) */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	/*
	 * Publish the request fields before desc becomes non-NULL —
	 * flush_channel() uses a non-NULL desc as "slot occupied".
	 */
	smp_wmb();
	request->desc = desc;

	/* GO! — order the fifo update before the doorbell write */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}
EXPORT_SYMBOL(talitos_submit);
335EXPORT_SYMBOL(talitos_submit);
336
337
338
339
/*
 * Process completed (or, on @error, all outstanding) requests on channel
 * @ch, invoking each request's completion callback with @error as status.
 *
 * The tail lock is dropped around the callback (the callback may resubmit
 * work), and the request fields are copied to a stack copy first so the
 * fifo slot can be recycled safely before the callback runs.
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* pair with the smp_wmb() in talitos_submit() */
		rmb();
		if (!is_sec1)
			hdr = request->desc->hdr;
		else if (request->desc->next_desc)
			hdr = (request->desc + 1)->hdr1;
		else
			hdr = request->desc->hdr1;

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;	/* not done yet and no forced error */
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries to a local so we can release the fifo slot */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);

		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}
404
405
406
407
/*
 * SEC1 "done" tasklet bodies: flush every channel selected by
 * ch_done_mask (the per-channel bits tested below), then re-enable the
 * channel-done interrupts that the IRQ handler masked before scheduling
 * this tasklet (SEC1 unmasks by clearing IMR bits).
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask)				\
static void talitos1_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 0x10000000)					\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & 0x40000000)					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & 0x00010000)					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & 0x00040000)					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */	\
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
434
/*
 * SEC2+ "done" tasklet bodies: same structure as the SEC1 variant but
 * with the SEC2 channel bit layout (two bits per channel), and unmasking
 * done interrupts by *setting* IMR bits.
 */
#define DEF_TALITOS2_DONE(name, ch_done_mask)				\
static void talitos2_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 1)						\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & (1 << 2))					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & (1 << 4))					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & (1 << 6))					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */	\
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
463
464
465
466
/*
 * Locate the descriptor currently being processed by channel @ch (via the
 * CDPR register) in the channel's fifo and return its header, or 0 if it
 * cannot be found.
 *
 * NOTE(review): the search loop dereferences fifo[iter].desc->next_desc
 * without checking desc for NULL; an empty fifo slot (desc == NULL, see
 * flush_channel()) encountered during the scan would oops — verify
 * against upstream history before relying on this path.
 */
static u32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	/* walk the fifo at most once, matching either desc or its chained next_desc */
	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	/* chained (next_desc) match: the offending header is in the second desc */
	if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc)
		return (priv->chan[ch].fifo[iter].desc + 1)->hdr;

	return priv->chan[ch].fifo[iter].desc->hdr;
}
498
499
500
501
/*
 * Dump the interrupt status registers of the execution unit(s) selected
 * by the descriptor header, plus the channel's descriptor buffer, to aid
 * error diagnosis.  A zero @desc_hdr falls back to the header saved in
 * the channel's DESCBUF.
 */
static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);

	/* primary execution unit */
	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		/* KEU status is read through the PKEU register window */
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	/* secondary execution unit */
	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	/* dump the eight 64-bit words of the channel descriptor buffer */
	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}
573
574
575
576
/*
 * Recover from channel and device-level errors.
 *
 * For each channel flagged in @isr: decode and log the CCPSR_LO error
 * bits, flush outstanding requests with an error status, then either
 * reset the channel (always on SEC1) or attempt a SEC2 continue/restart.
 * If any channel restart fails, or device-level error bits are set in
 * @isr/@isr_lo, flush everything with -EIO and re-init the whole device.
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* SEC1 must always reset the channel */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 for channels 0..3 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			/* SEC2-only continue/restart path */
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
				TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}
672
/*
 * SEC1 hard-IRQ handlers: read and acknowledge ISR/ISR_LO, dispatch
 * errors to talitos_error() (with the reg_lock dropped), otherwise mask
 * the completed channels' done interrupts and defer completion handling
 * to the matching done tasklet.
 */
#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	\
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	\
{									\
	struct device *dev = data;					\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	u32 isr, isr_lo;						\
	unsigned long flags;						\
									\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	isr = in_be32(priv->reg + TALITOS_ISR);				\
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			\
	/* Acknowledge interrupt */					\
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			\
									\
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
		talitos_error(dev, isr & ch_err_mask, isr_lo);		\
	}								\
	else {								\
		if (likely(isr & ch_done_mask)) {			\
			/* mask further done interrupts. */		\
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]);	\
		}							\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
	}								\
									\
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE;	\
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
707
/*
 * SEC2+ hard-IRQ handlers: identical flow to the SEC1 variant except
 * done interrupts are masked by *clearing* IMR bits, and any isr_lo bit
 * is treated as an error.
 */
#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	\
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	\
{									\
	struct device *dev = data;					\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	u32 isr, isr_lo;						\
	unsigned long flags;						\
									\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	isr = in_be32(priv->reg + TALITOS_ISR);				\
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			\
	/* Acknowledge interrupt */					\
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			\
									\
	if (unlikely(isr & ch_err_mask || isr_lo)) {			\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
		talitos_error(dev, isr & ch_err_mask, isr_lo);		\
	}								\
	else {								\
		if (likely(isr & ch_done_mask)) {			\
			/* mask further done interrupts. */		\
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]);	\
		}							\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
	}								\
									\
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE;	\
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)
746
747
748
749
750static int talitos_rng_data_present(struct hwrng *rng, int wait)
751{
752 struct device *dev = (struct device *)rng->priv;
753 struct talitos_private *priv = dev_get_drvdata(dev);
754 u32 ofl;
755 int i;
756
757 for (i = 0; i < 20; i++) {
758 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
759 TALITOS_RNGUSR_LO_OFL;
760 if (ofl || !wait)
761 break;
762 udelay(10);
763 }
764
765 return !!ofl;
766}
767
/*
 * hwrng data_read callback.
 *
 * The double read is intentional: both halves of the fifo entry are
 * read (presumably the fifo requires full 64-bit accesses — the upper
 * word is discarded), and only the low 32 bits are returned.
 */
static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* read high word first, then keep only the low word */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}
779
/*
 * hwrng init callback: software-reset the RNGU and wait for it to report
 * ready (RD), then clear the data size register so the unit free-runs.
 *
 * Returns 0 on success, -ENODEV if the unit never comes ready.
 */
static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}
801
802static int talitos_register_rng(struct device *dev)
803{
804 struct talitos_private *priv = dev_get_drvdata(dev);
805 int err;
806
807 priv->rng.name = dev_driver_string(dev),
808 priv->rng.init = talitos_rng_init,
809 priv->rng.data_present = talitos_rng_data_present,
810 priv->rng.data_read = talitos_rng_data_read,
811 priv->rng.priv = (unsigned long)dev;
812
813 err = hwrng_register(&priv->rng);
814 if (!err)
815 priv->rng_registered = true;
816
817 return err;
818}
819
820static void talitos_unregister_rng(struct device *dev)
821{
822 struct talitos_private *priv = dev_get_drvdata(dev);
823
824 if (!priv->rng_registered)
825 return;
826
827 hwrng_unregister(&priv->rng);
828 priv->rng_registered = false;
829}
830
831
832
833
/* crypto alg registration priority */
#define TALITOS_CRA_PRIORITY		3000

/* slightly lower priority for the variants without h/w auth check */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
/* room for an auth key (up to SHA512 block) plus an AES enc key */
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

/*
 * Per-transform context.
 * @dev:		SEC device servicing this transform
 * @ch:			channel assigned to this transform
 * @desc_hdr_template:	descriptor header with EU/mode selection pre-filled
 * @key:		auth key followed by enc key (see aead_setkey())
 * @iv:			scratch IV buffer
 * @dma_key:		bus address of @key (mapped in the setkey paths)
 * @keylen:		total length of @key; nonzero means @dma_key is mapped
 * @enckeylen:		cipher portion length within @key
 * @authkeylen:		auth portion length within @key
 */
struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};
854
#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

/*
 * Per-request ahash state.
 * @hw_context:		hardware digest state (sized for the largest MDEU ctx)
 * @hw_context_size:	valid bytes in @hw_context
 * @buf:		double buffer for data held back between updates
 * @buf_idx:		which half of @buf is current
 * @swinit:		nonzero: context must be initialized in software
 * @first:		nonzero: first submission for this request
 * @last:		nonzero: final submission (produce the digest)
 * @to_hash_later:	bytes deferred to the next submission
 * @nbuf:		bytes currently buffered in @buf
 * @bufsl:		scatterlist wrapping @buf for mixed buf+src hashing
 * @psrc:		source scatterlist for the current submission
 */
struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};
871
/*
 * Serialized ahash state for export()/import(): the fields of
 * talitos_ahash_req_ctx that must survive, with a single buffer.
 */
struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};
881
/*
 * AEAD (authenc) setkey: split the wrapped key into auth and enc parts,
 * store them contiguously in ctx->key (auth first), and DMA-map the
 * result.  A previous mapping, indicated by a nonzero ctx->keylen, is
 * unmapped first.  The stack copy of the keys is wiped on all paths.
 *
 * Returns 0 on success, -EINVAL (with BAD_KEY_LEN flagged) on a
 * malformed or oversized key blob.
 */
static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
915
/*
 * AEAD setkey variant for 3DES ciphers: same as aead_setkey() plus
 * enforcement of the exact DES3 key size and the DES3 weak/parity key
 * checks via __des3_verify_key().  The stack key copy is wiped on all
 * return paths (badkey falls through to out).
 */
static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(authenc);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err)) {
		crypto_aead_set_flags(authenc, flags);
		goto out;
	}

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
/*
 * talitos_edesc - software-extended descriptor
 * @src_nents:		number of segments in src scatterlist
 * @dst_nents:		number of segments in dst scatterlist
 * @icv_ool:		whether the ICV landed out-of-line, in the link table
 * @iv_dma:		dma address of the iv for checking continuity
 * @dma_len:		length of dma-mapped link_tbl space (0 = none mapped)
 * @dma_link_tbl:	bus physical address of link_tbl/buf
 * @desc:		h/w descriptor
 * @link_tbl:		input and output h/w link tables (if {src,dst}_nents > 1) (SEC2)
 * @buf:		input and output buffers (if {src,dst}_nents > 1) (SEC1)
 *
 * The link_tbl/buf union is allocated as trailing storage beyond this
 * struct.
 */
struct talitos_edesc {
	int src_nents;
	int dst_nents;
	bool icv_ool;
	dma_addr_t iv_dma;
	int dma_len;
	dma_addr_t dma_link_tbl;
	struct talitos_desc desc;
	union {
		struct talitos_ptr link_tbl[0];
		u8 buf[0];
	};
};
993
/*
 * Unmap the src/dst scatterlists of a request.  On SEC1, multi-segment
 * lists were bounced through edesc->buf instead of being mapped, so the
 * output data is synced and copied back to dst here rather than
 * unmapped.  src == dst (in-place) uses a single bidirectional mapping.
 */
static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
1021
/*
 * Undo all DMA mappings made for an AEAD request.  The cipher-IV pointer
 * index depends on descriptor type (ptr[2] for IPSEC_ESP, ptr[3]
 * otherwise); IPSEC_ESP descriptors additionally mapped ptr[6].  For
 * non-IPSEC_ESP types, the last cipher block is saved into ctx->iv.
 */
static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen,
			 areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		/* preserve the last cipher block as the next IV */
		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + areq->cryptlen - ivsize);
	}
}
1051
1052
1053
1054
/*
 * AEAD encrypt completion callback: unmap everything, copy the ICV back
 * into the tail of dst when the hardware wrote it out-of-line (into
 * edesc->buf on SEC1, past the link tables on SEC2+), free the edesc
 * and complete the request.
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;
	struct scatterlist *sg;
	void *icvdata;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq);

	/* copy the generated ICV to dst */
	if (edesc->icv_ool) {
		if (is_sec1)
			icvdata = edesc->buf + areq->assoclen + areq->cryptlen;
		else
			icvdata = &edesc->link_tbl[edesc->src_nents +
						   edesc->dst_nents + 2];
		sg = sg_last(areq->dst, edesc->dst_nents);
		memcpy((char *)sg_virt(sg) + sg->length - authsize,
		       icvdata, authsize);
	}

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}
1091
/*
 * AEAD decrypt completion callback for the software-auth-check path:
 * after unmapping, compare the ICV computed by the hardware (stored in
 * the link table area of the edesc) against the ICV at the tail of dst,
 * in constant time.  Sets err to -EBADMSG on mismatch.
 */
static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	struct scatterlist *sg;
	char *oicv, *icv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req);

	if (!err) {
		/* auth check: received ICV sits at the tail of dst */
		sg = sg_last(req->dst, edesc->dst_nents ? : 1);
		icv = (char *)sg_virt(sg) + sg->length - authsize;

		if (edesc->dma_len) {
			if (is_sec1)
				oicv = (char *)&edesc->dma_link_tbl +
				       req->assoclen + req->cryptlen;
			else
				oicv = (char *)
				       &edesc->link_tbl[edesc->src_nents +
							edesc->dst_nents + 2];
			if (edesc->icv_ool)
				icv = oicv + authsize;
		} else
			oicv = (char *)&edesc->link_tbl[0];

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}
1134
/*
 * AEAD decrypt completion callback for the hardware-auth-check path:
 * the SEC verified the ICV itself; translate the ICCR1 result written
 * back in the descriptor header into -EBADMSG on failure.
 */
static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
1155
1156
1157
1158
1159
/*
 * Convert (a window into) a dma-mapped scatterlist to a SEC2 h/w link
 * table: skip @offset bytes into the list, then emit entries covering
 * @cryptlen bytes.  The final entry is tagged with LNKTBL_RETURN.
 *
 * Returns the number of link-table entries written.
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int cryptlen,
				 struct talitos_ptr *link_tbl_ptr)
{
	int n_sg = sg_count;
	int count = 0;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		/* segment entirely before the window: just consume offset */
		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, len, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RETURN, 0);

	return count;
}
1198
/*
 * Point a descriptor pointer at @len bytes of @src (+@elen extent bytes),
 * choosing the representation the hardware needs:
 *  - NULL src: zero pointer;
 *  - single segment: direct pointer;
 *  - SEC1 multi-segment: direct pointer into the bounce buffer;
 *  - SEC2 multi-segment: build a link table at @tbl_off and emit a
 *    LNKTBL_JUMP pointer to it (collapsed back to a direct pointer if
 *    the table degenerates to one entry).
 *
 * Returns the segment count consumed (1 for the NULL case).
 */
static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len + elen,
					 &edesc->link_tbl[tbl_off]);
	if (sg_count == 1) {
		/* Only one segment now, so no link tbl needed */
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}
1233
/* convenience wrapper: map with no extra extent (elen == 0) */
static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0);
}
1242
1243
1244
1245
/*
 * fill in and submit an AEAD descriptor: DMA-maps the request buffers,
 * populates the seven h/w descriptor pointer slots and submits to the
 * channel.  On submit failure everything is unmapped and freed here.
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	/* IV and cipher-key slots are swapped between the two desc types */
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		/* SEC1 cannot walk S/G lists: linearize into the bounce buffer */
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data (associated data) */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key (follows the auth key in the same DMA mapping) */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in: for h/w ICV checking the extent (elen) covers the
	 * HMAC bytes appended to the ciphertext, typically 12 for ipsec.
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
			     sg_count, areq->assoclen, tbl_off);

	if (is_ipsec_esp)
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);

	/* ICV data: either appended via the link table or via ptr[6] */
	if (ret > 1) {
		tbl_off += ret;
		edesc->icv_ool = true;	/* ICV lands out-of-line in the edesc */
		sync_needed = true;

		if (is_ipsec_esp) {
			struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
			int offset = (edesc->src_nents + edesc->dst_nents + 2) *
				     sizeof(struct talitos_ptr) + authsize;

			/* Add an entry to the link table for ICV data */
			to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
			to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RETURN,
					       is_sec1);

			/* icv data follows link tables */
			to_talitos_ptr(tbl_ptr, edesc->dma_link_tbl + offset,
				       authsize, is_sec1);
		} else {
			dma_addr_t addr = edesc->dma_link_tbl;

			if (is_sec1)
				addr += areq->assoclen + cryptlen;
			else
				addr += sizeof(struct talitos_ptr) * tbl_off;

			to_talitos_ptr(&desc->ptr[6], addr, authsize, is_sec1);
		}
	} else if (!is_ipsec_esp) {
		ret = talitos_sg_map(dev, areq->dst, authsize, edesc,
				     &desc->ptr[6], sg_count, areq->assoclen +
							      cryptlen,
				     tbl_off);
		if (ret > 1) {
			tbl_off += ret;
			edesc->icv_ool = true;
			sync_needed = true;
		} else {
			edesc->icv_ool = false;
		}
	} else {
		edesc->icv_ool = false;
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	/* flush the CPU-written link tables to the device */
	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1388
1389
1390
1391
/*
 * allocate and map the extended descriptor for a cipher/aead/hash request:
 * counts src/dst scatterlist entries, sizes the trailing link-table (or,
 * on SEC1, bounce-buffer) area, DMA-maps the IV and the link table.
 * Returns an ERR_PTR on failure.
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
	void *err;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (ivsize)
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);

	if (!dst || dst == src) {
		/* in-place: one mapping covers input and output */
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			err = ERR_PTR(-EINVAL);
			goto error_sg;
		}
		/* nents == 0 encodes "single contiguous segment" below */
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src */
		/* decrypt reads the ICV from src; encrypt writes it to dst */
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			err = ERR_PTR(-EINVAL);
			goto error_sg;
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			err = ERR_PTR(-EINVAL);
			goto error_sg;
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents) {
		if (is_sec1)
			/* SEC1: bounce buffer instead of link tables */
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0);
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize * 2;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
		alloc_len += icv_stashing ? authsize : 0;
	}

	/* SEC1 in-place hash may need room for a chained second descriptor */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc) {
		err = ERR_PTR(-ENOMEM);
		goto error_sg;
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len) {
		void *addr = &edesc->link_tbl[0];

		if (is_sec1 && !dst)
			addr += sizeof(struct talitos_desc);
		edesc->dma_link_tbl = dma_map_single(dev, addr,
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);
	}
	return edesc;
error_sg:
	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	return err;
}
1501
1502static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
1503 int icv_stashing, bool encrypt)
1504{
1505 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1506 unsigned int authsize = crypto_aead_authsize(authenc);
1507 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1508 unsigned int ivsize = crypto_aead_ivsize(authenc);
1509
1510 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1511 iv, areq->assoclen, areq->cryptlen,
1512 authsize, ivsize, icv_stashing,
1513 areq->base.flags, encrypt);
1514}
1515
1516static int aead_encrypt(struct aead_request *req)
1517{
1518 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1519 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1520 struct talitos_edesc *edesc;
1521
1522
1523 edesc = aead_edesc_alloc(req, req->iv, 0, true);
1524 if (IS_ERR(edesc))
1525 return PTR_ERR(edesc);
1526
1527
1528 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1529
1530 return ipsec_esp(edesc, req, ipsec_esp_encrypt_done);
1531}
1532
/*
 * AEAD decrypt entry point: use hardware ICV checking when the device
 * supports it for this layout, otherwise stash the incoming ICV and let
 * the completion callback compare it in software.
 */
static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	struct scatterlist *sg;
	void *icvdata;

	/* payload length excludes the trailing ICV */
	req->cryptlen -= authsize;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and let the hardware check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* ICCR status is read back in ipsec_esp_decrypt_hwauth_done */
		return ipsec_esp(edesc, req, ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	if (edesc->dma_len)
		icvdata = (char *)&edesc->link_tbl[edesc->src_nents +
						   edesc->dst_nents + 2];
	else
		icvdata = &edesc->link_tbl[0];

	sg = sg_last(req->src, edesc->src_nents ? : 1);

	memcpy(icvdata, (char *)sg_virt(sg) + sg->length - authsize, authsize);

	return ipsec_esp(edesc, req, ipsec_esp_decrypt_swauth_done);
}
1580
1581static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1582 const u8 *key, unsigned int keylen)
1583{
1584 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1585 struct device *dev = ctx->dev;
1586
1587 if (ctx->keylen)
1588 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1589
1590 memcpy(&ctx->key, key, keylen);
1591 ctx->keylen = keylen;
1592
1593 ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1594
1595 return 0;
1596}
1597
1598static int ablkcipher_des_setkey(struct crypto_ablkcipher *cipher,
1599 const u8 *key, unsigned int keylen)
1600{
1601 u32 tmp[DES_EXPKEY_WORDS];
1602
1603 if (unlikely(crypto_ablkcipher_get_flags(cipher) &
1604 CRYPTO_TFM_REQ_WEAK_KEY) &&
1605 !des_ekey(tmp, key)) {
1606 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
1607 return -EINVAL;
1608 }
1609
1610 return ablkcipher_setkey(cipher, key, keylen);
1611}
1612
1613static int ablkcipher_des3_setkey(struct crypto_ablkcipher *cipher,
1614 const u8 *key, unsigned int keylen)
1615{
1616 u32 flags;
1617 int err;
1618
1619 flags = crypto_ablkcipher_get_flags(cipher);
1620 err = __des3_verify_key(&flags, key);
1621 if (unlikely(err)) {
1622 crypto_ablkcipher_set_flags(cipher, flags);
1623 return err;
1624 }
1625
1626 return ablkcipher_setkey(cipher, key, keylen);
1627}
1628
/* undo all DMA mappings made by common_nonsnoop() for an ablkcipher req */
static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct ablkcipher_request *areq)
{
	/* iv out (ptr[5]) */
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	/* data buffers, then iv in (ptr[1]) */
	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}
1642
1643static void ablkcipher_done(struct device *dev,
1644 struct talitos_desc *desc, void *context,
1645 int err)
1646{
1647 struct ablkcipher_request *areq = context;
1648 struct talitos_edesc *edesc;
1649
1650 edesc = container_of(desc, struct talitos_edesc, desc);
1651
1652 common_nonsnoop_unmap(dev, edesc, areq);
1653
1654 kfree(edesc);
1655
1656 areq->base.complete(&areq->base, err);
1657}
1658
/*
 * fill in and submit the ablkcipher descriptor: slot 0 is unused, 1 = IV in,
 * 2 = key, 3 = cipher in, 4 = cipher out, 5 = IV out, 6 unused.
 */
static int common_nonsnoop(struct talitos_edesc *edesc,
			   struct ablkcipher_request *areq,
			   void (*callback) (struct device *dev,
					     struct talitos_desc *desc,
					     void *context, int error))
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->nbytes;
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	int sg_count, ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	/* first DWORD empty */

	/* cipher iv */
	to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		/* SEC1 cannot walk S/G lists: linearize into the bounce buffer */
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* cipher in */
	sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
				  &desc->ptr[3], sg_count, 0, 0);
	if (sg_count > 1)
		sync_needed = true;

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	/* dst link table starts after the src entries (+1 for the AD slot) */
	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
			     sg_count, 0, (edesc->src_nents + 1));
	if (ret > 1)
		sync_needed = true;

	/* iv out */
	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
			       DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1729
1730static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
1731 areq, bool encrypt)
1732{
1733 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1734 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1735 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
1736
1737 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1738 areq->info, 0, areq->nbytes, 0, ivsize, 0,
1739 areq->base.flags, encrypt);
1740}
1741
1742static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1743{
1744 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1745 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1746 struct talitos_edesc *edesc;
1747
1748
1749 edesc = ablkcipher_edesc_alloc(areq, true);
1750 if (IS_ERR(edesc))
1751 return PTR_ERR(edesc);
1752
1753
1754 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1755
1756 return common_nonsnoop(edesc, areq, ablkcipher_done);
1757}
1758
1759static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1760{
1761 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1762 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1763 struct talitos_edesc *edesc;
1764
1765
1766 edesc = ablkcipher_edesc_alloc(areq, false);
1767 if (IS_ERR(edesc))
1768 return PTR_ERR(edesc);
1769
1770 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1771
1772 return common_nonsnoop(edesc, areq, ablkcipher_done);
1773}
1774
/*
 * undo the DMA mappings made by common_nonsnoop_hash(), including those of
 * the optional chained second descriptor used on SEC1.
 */
static void common_nonsnoop_hash_unmap(struct device *dev,
				       struct talitos_edesc *edesc,
				       struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct talitos_desc *desc = &edesc->desc;
	struct talitos_desc *desc2 = desc + 1;	/* chained desc, if any */

	/* hash/context out (ptr[5]); avoid double-unmap when shared with desc2 */
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
	if (desc->next_desc &&
	    desc->ptr[5].ptr != desc2->ptr[5].ptr)
		unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);

	/* When using hashctx-in, must unmap it. */
	if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
					 DMA_TO_DEVICE);
	else if (desc->next_desc)
		unmap_single_talitos_ptr(dev, &desc2->ptr[1],
					 DMA_TO_DEVICE);

	/* SEC1 mapped the buffered partial block into ptr[3] */
	if (is_sec1 && req_ctx->nbuf)
		unmap_single_talitos_ptr(dev, &desc->ptr[3],
					 DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	/* the chained descriptor itself was mapped separately */
	if (edesc->desc.next_desc)
		dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
				 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
}
1812
/* completion callback for ahash requests */
static void ahash_done(struct device *dev,
		       struct talitos_desc *desc, void *context,
		       int err)
{
	struct ahash_request *areq = context;
	struct talitos_edesc *edesc =
		container_of(desc, struct talitos_edesc, desc);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	if (!req_ctx->last && req_ctx->to_hash_later) {
		/* Position any partial block for next update/final/finup */
		req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
		req_ctx->nbuf = req_ctx->to_hash_later;
	}
	common_nonsnoop_hash_unmap(dev, edesc, areq);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}
1833
1834
1835
1836
1837
/*
 * SEC1 doesn't like hashing of 0 sized message, so we do the padding
 * ourselves and submit one pre-padded empty block instead.
 */
static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
				      struct talitos_edesc *edesc,
				      struct talitos_ptr *ptr)
{
	/* 0x80 then zeros: MD5/SHA-style padding of an empty message */
	static u8 padded_hash[64] = {
		0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	};

	pr_err_once("Bug in SEC1, padding ourself\n");
	/* disable h/w padding: the block above is already padded */
	edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
	map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
			       (char *)padded_hash, DMA_TO_DEVICE);
}
1854
/*
 * fill in and submit the hash descriptor.  On SEC1, when both buffered
 * bytes and new data must be hashed, a second descriptor is chained via
 * next_desc to process the scatterlist data after the buffered block.
 */
static int common_nonsnoop_hash(struct talitos_edesc *edesc,
				struct ahash_request *areq, unsigned int length,
				unsigned int offset,
				void (*callback) (struct device *dev,
						  struct talitos_desc *desc,
						  void *context, int error))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	int ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int sg_count;

	/* first DWORD empty */

	/* hash context in (skipped on the very first h/w-initialized pass) */
	if (!req_ctx->first || req_ctx->swinit) {
		map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_TO_DEVICE);
		req_ctx->swinit = 0;
	}
	/* Indicate next op is not the first. */
	req_ctx->first = 0;

	/* HMAC key */
	if (ctx->keylen)
		to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
			       is_sec1);

	/* SEC1 hashes the buffered bytes via ptr[3]; exclude them here */
	if (is_sec1 && req_ctx->nbuf)
		length -= req_ctx->nbuf;

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		/* SEC1: linearize the source after the embedded descriptor */
		sg_pcopy_to_buffer(req_ctx->psrc, sg_count,
				   edesc->buf + sizeof(struct talitos_desc),
				   length, req_ctx->nbuf);
	else if (length)
		sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
				      DMA_TO_DEVICE);
	/*
	 * data in
	 */
	if (is_sec1 && req_ctx->nbuf) {
		map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
				       req_ctx->buf[req_ctx->buf_idx],
				       DMA_TO_DEVICE);
	} else {
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc->ptr[3], sg_count, offset, 0);
		if (sg_count > 1)
			sync_needed = true;
	}

	/* fifth DWORD empty */

	/* hash/HMAC out -- hash context out on intermediate ops */
	if (req_ctx->last)
		map_single_talitos_ptr(dev, &desc->ptr[5],
				       crypto_ahash_digestsize(tfm),
				       areq->result, DMA_FROM_DEVICE);
	else
		map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
		talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);

	if (is_sec1 && req_ctx->nbuf && length) {
		/* chain a second descriptor for the non-buffered data */
		struct talitos_desc *desc2 = desc + 1;
		dma_addr_t next_desc;

		memset(desc2, 0, sizeof(*desc2));
		desc2->hdr = desc->hdr;
		desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
		desc2->hdr1 = desc2->hdr;
		/* first desc: no padding, continue mode, no done IRQ */
		desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
		desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
		desc->hdr &= ~DESC_HDR_DONE_NOTIFY;

		if (desc->ptr[1].ptr)
			copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
					 is_sec1);
		else
			map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_TO_DEVICE);
		copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc2->ptr[3], sg_count, offset, 0);
		if (sg_count > 1)
			sync_needed = true;
		copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
		if (req_ctx->last)
			/* first desc writes the intermediate context */
			map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_FROM_DEVICE);

		next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
					   DMA_BIDIRECTIONAL);
		desc->next_desc = cpu_to_be32(next_desc);
	}

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_hash_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1982
1983static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1984 unsigned int nbytes)
1985{
1986 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1987 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1988 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1989 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1990 bool is_sec1 = has_ftr_sec1(priv);
1991
1992 if (is_sec1)
1993 nbytes -= req_ctx->nbuf;
1994
1995 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
1996 nbytes, 0, 0, 0, areq->base.flags, false);
1997}
1998
/* initialize the per-request hash state */
static int ahash_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	unsigned int size;
	dma_addr_t dma;

	/* Initialize the context */
	req_ctx->buf_idx = 0;
	req_ctx->nbuf = 0;
	req_ctx->first = 1; /* first indicates h/w must init its registers */
	req_ctx->swinit = 0; /* assume h/w init of context */
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
	       ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
	       : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;

	/*
	 * map/unmap round-trip to flush hw_context from the CPU cache so
	 * the device later sees a consistent buffer (no mapping is kept) —
	 * NOTE(review): presumably needed on non-coherent platforms; confirm
	 */
	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}
2024
2025
2026
2027
2028
2029static int ahash_init_sha224_swinit(struct ahash_request *areq)
2030{
2031 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2032
2033 req_ctx->hw_context[0] = SHA224_H0;
2034 req_ctx->hw_context[1] = SHA224_H1;
2035 req_ctx->hw_context[2] = SHA224_H2;
2036 req_ctx->hw_context[3] = SHA224_H3;
2037 req_ctx->hw_context[4] = SHA224_H4;
2038 req_ctx->hw_context[5] = SHA224_H5;
2039 req_ctx->hw_context[6] = SHA224_H6;
2040 req_ctx->hw_context[7] = SHA224_H7;
2041
2042
2043 req_ctx->hw_context[8] = 0;
2044 req_ctx->hw_context[9] = 0;
2045
2046 ahash_init(areq);
2047 req_ctx->swinit = 1;
2048
2049 return 0;
2050}
2051
/*
 * Core update/final path: buffer sub-blocksize remainders, carry any
 * partial trailing block to the next call, and submit full blocks to the
 * hardware via common_nonsnoop_hash().
 */
static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
		crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int nbytes_to_hash;
	unsigned int to_hash_later;
	unsigned int nsg;
	int nents;
	struct device *dev = ctx->dev;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int offset = 0;
	u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];

	if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
		/* Buffer up to one whole block */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, nbytes);
		req_ctx->nbuf += nbytes;
		return 0;
	}

	/* At least (blocksize + 1) bytes are available to hash */
	nbytes_to_hash = nbytes + req_ctx->nbuf;
	to_hash_later = nbytes_to_hash & (blocksize - 1);

	if (req_ctx->last)
		to_hash_later = 0;
	else if (to_hash_later)
		/* There is a partial block. Hash the full block(s) now */
		nbytes_to_hash -= to_hash_later;
	else {
		/* Keep one block buffered */
		nbytes_to_hash -= blocksize;
		to_hash_later = blocksize;
	}

	/* Chain in any previously buffered bytes */
	if (!is_sec1 && req_ctx->nbuf) {
		nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
		sg_init_table(req_ctx->bufsl, nsg);
		sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
		if (nsg > 1)
			sg_chain(req_ctx->bufsl, 2, areq->src);
		req_ctx->psrc = req_ctx->bufsl;
	} else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
		/* SEC1: top up the buffer to a full block; @offset bytes of
		 * areq->src are then consumed by the buffer, not the SG walk */
		if (nbytes_to_hash > blocksize)
			offset = blocksize - req_ctx->nbuf;
		else
			offset = nbytes_to_hash - req_ctx->nbuf;
		nents = sg_nents_for_len(areq->src, offset);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, offset);
		req_ctx->nbuf += offset;
		req_ctx->psrc = areq->src;
	} else
		req_ctx->psrc = areq->src;

	if (to_hash_later) {
		/* Copy the trailing partial block into the other buffer */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_pcopy_to_buffer(areq->src, nents,
				   req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
				   to_hash_later,
				   nbytes - to_hash_later);
	}
	req_ctx->to_hash_later = to_hash_later;

	/* Allocate extended descriptor */
	edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template;

	/* On last one, request SEC to pad; otherwise continue */
	if (req_ctx->last)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
	else
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;

	/* request SEC to INIT hash. */
	if (req_ctx->first && !req_ctx->swinit)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;

	/* When the tfm context has a keylen, it's an HMAC.
	 * A first or last (ie. not middle) descriptor must request HMAC.
	 */
	if (ctx->keylen && (req_ctx->first || req_ctx->last))
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;

	return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, offset,
				    ahash_done);
}
2162
2163static int ahash_update(struct ahash_request *areq)
2164{
2165 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2166
2167 req_ctx->last = 0;
2168
2169 return ahash_process_req(areq, areq->nbytes);
2170}
2171
2172static int ahash_final(struct ahash_request *areq)
2173{
2174 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2175
2176 req_ctx->last = 1;
2177
2178 return ahash_process_req(areq, 0);
2179}
2180
2181static int ahash_finup(struct ahash_request *areq)
2182{
2183 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2184
2185 req_ctx->last = 1;
2186
2187 return ahash_process_req(areq, areq->nbytes);
2188}
2189
2190static int ahash_digest(struct ahash_request *areq)
2191{
2192 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2193 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2194
2195 ahash->init(areq);
2196 req_ctx->last = 1;
2197
2198 return ahash_process_req(areq, areq->nbytes);
2199}
2200
/* export the intermediate hash state into @out (struct talitos_export_state) */
static int ahash_export(struct ahash_request *areq, void *out)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_export_state *export = out;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	/*
	 * map/unmap FROM_DEVICE round-trip so the CPU copy below reads the
	 * hash context the device last wrote — NOTE(review): presumably a
	 * cache-invalidate on non-coherent platforms; confirm
	 */
	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_FROM_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);

	memcpy(export->hw_context, req_ctx->hw_context,
	       req_ctx->hw_context_size);
	/* only the currently active buffer is part of the exported state */
	memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
	export->swinit = req_ctx->swinit;
	export->first = req_ctx->first;
	export->last = req_ctx->last;
	export->to_hash_later = req_ctx->to_hash_later;
	export->nbuf = req_ctx->nbuf;

	return 0;
}
2225
/* restore intermediate hash state previously saved by ahash_export() */
static int ahash_import(struct ahash_request *areq, const void *in)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	const struct talitos_export_state *export = in;
	unsigned int size;
	dma_addr_t dma;

	memset(req_ctx, 0, sizeof(*req_ctx));
	/* context size depends on digest width, as in ahash_init() */
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
	       ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
	       : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;
	memcpy(req_ctx->hw_context, export->hw_context, size);
	/* buf_idx was zeroed above, so restore into buf[0] */
	memcpy(req_ctx->buf[0], export->buf, export->nbuf);
	req_ctx->swinit = export->swinit;
	req_ctx->first = export->first;
	req_ctx->last = export->last;
	req_ctx->to_hash_later = export->to_hash_later;
	req_ctx->nbuf = export->nbuf;

	/*
	 * map/unmap TO_DEVICE round-trip flushes the restored context out of
	 * the CPU cache for the device — NOTE(review): confirm intent
	 */
	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}
2255
/*
 * Synchronously digest @key (@keylen bytes) into @hash using the same tfm.
 * Used by ahash_setkey() to shrink HMAC keys longer than the block size.
 */
static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
		   u8 *hash)
{
	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	struct scatterlist sg[1];
	struct ahash_request *req;
	struct crypto_wait wait;
	int ret;

	crypto_init_wait(&wait);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	/* Keep tfm keylen == 0 during hash of the long key */
	ctx->keylen = 0;
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	sg_init_one(&sg[0], key, keylen);

	ahash_request_set_crypt(req, sg, hash, keylen);
	/* wait for the async digest to complete */
	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);

	return ret;
}
2286
2287static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2288 unsigned int keylen)
2289{
2290 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2291 struct device *dev = ctx->dev;
2292 unsigned int blocksize =
2293 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2294 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2295 unsigned int keysize = keylen;
2296 u8 hash[SHA512_DIGEST_SIZE];
2297 int ret;
2298
2299 if (keylen <= blocksize)
2300 memcpy(ctx->key, key, keysize);
2301 else {
2302
2303 ret = keyhash(tfm, key, keylen, hash);
2304
2305 if (ret) {
2306 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2307 return -EINVAL;
2308 }
2309
2310 keysize = digestsize;
2311 memcpy(ctx->key, hash, digestsize);
2312 }
2313
2314 if (ctx->keylen)
2315 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2316
2317 ctx->keylen = keysize;
2318 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
2319
2320 return 0;
2321}
2322
2323
/*
 * Static template from which algorithms are instantiated at probe time
 * (see talitos_alg_alloc()).
 */
struct talitos_alg_template {
	u32 type;	/* CRYPTO_ALG_TYPE_* — selects the union member below */
	u32 priority;	/* 0 means "use the default TALITOS_CRA_PRIORITY" */
	union {
		struct crypto_alg crypto;	/* ablkcipher algorithms */
		struct ahash_alg hash;		/* (H)MAC hash algorithms */
		struct aead_alg aead;		/* authenc() AEAD algorithms */
	} alg;
	__be32 desc_hdr_template;	/* SEC descriptor header for this alg */
};
2334
static struct talitos_alg_template driver_algs[] = {
	/* AEAD algorithms.  These use a single-pass ipsec_esp descriptor */
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha1-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha1-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha1),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha1-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha1),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha1-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha224-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha224-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha224),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha224-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha224),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha224-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha256-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha256-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha256),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha256-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha256),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha256-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha384-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUB |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha384),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha384-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUB |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "authenc-hmac-sha512-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUB |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(sha512),"
				    "cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-sha512-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUB |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(aes))",
			.cra_driver_name = "authenc-hmac-md5-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(aes))",
			.cra_driver_name = "authenc-hmac-md5-"
					   "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-md5-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	{ .type = CRYPTO_ALG_TYPE_AEAD,
	  .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
	  .alg.aead = {
		.base = {
			.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
			.cra_driver_name = "authenc-hmac-md5-"
					   "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_ASYNC,
		},
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = MD5_DIGEST_SIZE,
		.setkey = aead_des3_setkey,
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES |
			       DESC_HDR_SEL1_MDEUA |
			       DESC_HDR_MODE1_MDEU_INIT |
			       DESC_HDR_MODE1_MDEU_PAD |
			       DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	/* ABLKCIPHER algorithms. */
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "ecb-aes-talitos",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_AESU,
	},
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "cbc-aes-talitos",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CBC,
	},
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "ctr(aes)",
		.cra_driver_name = "ctr-aes-talitos",
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
			       DESC_HDR_SEL0_AESU |
			       DESC_HDR_MODE0_AESU_CTR,
	},
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "ecb(des)",
		.cra_driver_name = "ecb-des-talitos",
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = ablkcipher_des_setkey,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU,
	},
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "cbc(des)",
		.cra_driver_name = "cbc-des-talitos",
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = ablkcipher_des_setkey,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC,
	},
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "ecb-3des-talitos",
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.setkey = ablkcipher_des3_setkey,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_3DES,
	},
	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
	  .alg.crypto = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "cbc-3des-talitos",
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_ablkcipher = {
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.setkey = ablkcipher_des3_setkey,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_DEU |
			       DESC_HDR_MODE0_DEU_CBC |
			       DESC_HDR_MODE0_DEU_3DES,
	},
	/* AHASH algorithms. */
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = MD5_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "md5",
			.cra_driver_name = "md5-talitos",
			.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_MD5,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA1_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "sha1",
			.cra_driver_name = "sha1-talitos",
			.cra_blocksize = SHA1_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_SHA1,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA224_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "sha224",
			.cra_driver_name = "sha224-talitos",
			.cra_blocksize = SHA224_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_SHA224,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "sha256",
			.cra_driver_name = "sha256-talitos",
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_SHA256,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA384_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "sha384",
			.cra_driver_name = "sha384-talitos",
			.cra_blocksize = SHA384_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUB |
			       DESC_HDR_MODE0_MDEUB_SHA384,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA512_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "sha512",
			.cra_driver_name = "sha512-talitos",
			.cra_blocksize = SHA512_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUB |
			       DESC_HDR_MODE0_MDEUB_SHA512,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = MD5_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "hmac(md5)",
			.cra_driver_name = "hmac-md5-talitos",
			.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_MD5,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA1_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "hmac(sha1)",
			.cra_driver_name = "hmac-sha1-talitos",
			.cra_blocksize = SHA1_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_SHA1,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA224_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "hmac(sha224)",
			.cra_driver_name = "hmac-sha224-talitos",
			.cra_blocksize = SHA224_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_SHA224,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA256_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "hmac(sha256)",
			.cra_driver_name = "hmac-sha256-talitos",
			.cra_blocksize = SHA256_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUA |
			       DESC_HDR_MODE0_MDEU_SHA256,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA384_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "hmac(sha384)",
			.cra_driver_name = "hmac-sha384-talitos",
			.cra_blocksize = SHA384_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUB |
			       DESC_HDR_MODE0_MDEUB_SHA384,
	},
	{ .type = CRYPTO_ALG_TYPE_AHASH,
	  .alg.hash = {
		.halg.digestsize = SHA512_DIGEST_SIZE,
		.halg.statesize = sizeof(struct talitos_export_state),
		.halg.base = {
			.cra_name = "hmac(sha512)",
			.cra_driver_name = "hmac-sha512-talitos",
			.cra_blocksize = SHA512_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_AHASH |
				     CRYPTO_ALG_ASYNC,
		}
	  },
	  .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			       DESC_HDR_SEL0_MDEUB |
			       DESC_HDR_MODE0_MDEUB_SHA512,
	}
};
3090
/* one instance per registered algorithm, linked into priv->alg_list */
struct talitos_crypto_alg {
	struct list_head entry;		/* node in talitos_private alg_list */
	struct device *dev;		/* owning SEC device */
	struct talitos_alg_template algt;	/* the registered algorithm */
};
3096
/* common tfm-context initialization for all algorithm types */
static int talitos_init_common(struct talitos_ctx *ctx,
			       struct talitos_crypto_alg *talitos_alg)
{
	struct talitos_private *priv;

	/* update context with ptr to dev */
	ctx->dev = talitos_alg->dev;

	/*
	 * assign a SEC channel to this tfm in round-robin fashion;
	 * masking works because num_channels is a power of 2 (checked
	 * at probe time)
	 */
	priv = dev_get_drvdata(ctx->dev);
	ctx->ch = atomic_inc_return(&priv->last_chan) &
		  (priv->num_channels - 1);

	/* copy descriptor header template value */
	ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;

	/* select done notification */
	ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;

	return 0;
}
3118
3119static int talitos_cra_init(struct crypto_tfm *tfm)
3120{
3121 struct crypto_alg *alg = tfm->__crt_alg;
3122 struct talitos_crypto_alg *talitos_alg;
3123 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3124
3125 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
3126 talitos_alg = container_of(__crypto_ahash_alg(alg),
3127 struct talitos_crypto_alg,
3128 algt.alg.hash);
3129 else
3130 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3131 algt.alg.crypto);
3132
3133 return talitos_init_common(ctx, talitos_alg);
3134}
3135
3136static int talitos_cra_init_aead(struct crypto_aead *tfm)
3137{
3138 struct aead_alg *alg = crypto_aead_alg(tfm);
3139 struct talitos_crypto_alg *talitos_alg;
3140 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3141
3142 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3143 algt.alg.aead);
3144
3145 return talitos_init_common(ctx, talitos_alg);
3146}
3147
3148static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3149{
3150 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3151
3152 talitos_cra_init(tfm);
3153
3154 ctx->keylen = 0;
3155 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3156 sizeof(struct talitos_ahash_req_ctx));
3157
3158 return 0;
3159}
3160
3161static void talitos_cra_exit(struct crypto_tfm *tfm)
3162{
3163 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3164 struct device *dev = ctx->dev;
3165
3166 if (ctx->keylen)
3167 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3168}
3169
3170
3171
3172
3173
3174
3175static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3176{
3177 struct talitos_private *priv = dev_get_drvdata(dev);
3178 int ret;
3179
3180 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3181 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3182
3183 if (SECONDARY_EU(desc_hdr_template))
3184 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3185 & priv->exec_units);
3186
3187 return ret;
3188}
3189
3190static int talitos_remove(struct platform_device *ofdev)
3191{
3192 struct device *dev = &ofdev->dev;
3193 struct talitos_private *priv = dev_get_drvdata(dev);
3194 struct talitos_crypto_alg *t_alg, *n;
3195 int i;
3196
3197 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
3198 switch (t_alg->algt.type) {
3199 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3200 break;
3201 case CRYPTO_ALG_TYPE_AEAD:
3202 crypto_unregister_aead(&t_alg->algt.alg.aead);
3203 case CRYPTO_ALG_TYPE_AHASH:
3204 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3205 break;
3206 }
3207 list_del(&t_alg->entry);
3208 }
3209
3210 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3211 talitos_unregister_rng(dev);
3212
3213 for (i = 0; i < 2; i++)
3214 if (priv->irq[i]) {
3215 free_irq(priv->irq[i], dev);
3216 irq_dispose_mapping(priv->irq[i]);
3217 }
3218
3219 tasklet_kill(&priv->done_task[0]);
3220 if (priv->irq[1])
3221 tasklet_kill(&priv->done_task[1]);
3222
3223 return 0;
3224}
3225
/*
 * Instantiate one driver algorithm from a static template: copy the
 * template, wire up the per-type operation callbacks, and apply
 * hardware-feature quirks.  Returns the new talitos_crypto_alg (device
 * managed) or an ERR_PTR: -ENOMEM on allocation failure, -ENOTSUPP when
 * the hardware lacks a required feature, -EINVAL on an unknown type.
 */
static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
						    struct talitos_alg_template
						           *template)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
			     GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	t_alg->algt = *template;

	switch (t_alg->algt.type) {
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg = &t_alg->algt.alg.crypto;
		alg->cra_init = talitos_cra_init;
		alg->cra_exit = talitos_cra_exit;
		alg->cra_type = &crypto_ablkcipher_type;
		/* keep a template-provided setkey (e.g. des/3des variants) */
		alg->cra_ablkcipher.setkey = alg->cra_ablkcipher.setkey ?:
					     ablkcipher_setkey;
		alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
		alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
		alg->cra_ablkcipher.geniv = "eseqiv";
		break;
	case CRYPTO_ALG_TYPE_AEAD:
		alg = &t_alg->algt.alg.aead.base;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.aead.init = talitos_cra_init_aead;
		/* keep a template-provided setkey (e.g. des3 variants) */
		t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
					      aead_setkey;
		t_alg->algt.alg.aead.encrypt = aead_encrypt;
		t_alg->algt.alg.aead.decrypt = aead_decrypt;
		/* sha224 AEAD needs hardware-initialized SHA224 state */
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		alg = &t_alg->algt.alg.hash.halg.base;
		alg->cra_init = talitos_cra_init_ahash;
		alg->cra_exit = talitos_cra_exit;
		alg->cra_type = &crypto_ahash_type;
		t_alg->algt.alg.hash.init = ahash_init;
		t_alg->algt.alg.hash.update = ahash_update;
		t_alg->algt.alg.hash.final = ahash_final;
		t_alg->algt.alg.hash.finup = ahash_finup;
		t_alg->algt.alg.hash.digest = ahash_digest;
		/* only keyed (hmac) hashes get a setkey */
		if (!strncmp(alg->cra_name, "hmac", 4))
			t_alg->algt.alg.hash.setkey = ahash_setkey;
		t_alg->algt.alg.hash.import = ahash_import;
		t_alg->algt.alg.hash.export = ahash_export;

		if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
		    !strncmp(alg->cra_name, "hmac", 4)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		/*
		 * without HW sha224 init, run sha224 as sha256 with a
		 * software-provided initial context
		 */
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    (!strcmp(alg->cra_name, "sha224") ||
		     !strcmp(alg->cra_name, "hmac(sha224)"))) {
			t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
			t_alg->algt.desc_hdr_template =
					DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
					DESC_HDR_SEL0_MDEUA |
					DESC_HDR_MODE0_MDEU_SHA256;
		}
		break;
	default:
		dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
		devm_kfree(dev, t_alg);
		return ERR_PTR(-EINVAL);
	}

	alg->cra_module = THIS_MODULE;
	/* template priority of 0 means "use the driver default" */
	if (t_alg->algt.priority)
		alg->cra_priority = t_alg->algt.priority;
	else
		alg->cra_priority = TALITOS_CRA_PRIORITY;
	alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct talitos_ctx);
	alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->dev = dev;

	return t_alg;
}
3316
/*
 * Map and request the device interrupt(s) from the device tree.
 *
 * SEC1 uses a single interrupt serving all four channels.  SEC2+ may
 * provide a second interrupt; when both are present, per the handler
 * naming, irq[0] serves channels 0/2 and irq[1] serves channels 1/3,
 * otherwise irq[0] serves all four.  On request failure the mapping is
 * disposed and the corresponding priv->irq[] slot cleared, so callers
 * and talitos_remove() can rely on nonzero irq[] meaning "requested".
 */
static int talitos_probe_irq(struct platform_device *ofdev)
{
	struct device *dev = &ofdev->dev;
	struct device_node *np = ofdev->dev.of_node;
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;
	bool is_sec1 = has_ftr_sec1(priv);

	priv->irq[0] = irq_of_parse_and_map(np, 0);
	if (!priv->irq[0]) {
		dev_err(dev, "failed to map irq\n");
		return -EINVAL;
	}
	if (is_sec1) {
		/* SEC1: one interrupt for all channels */
		err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	priv->irq[1] = irq_of_parse_and_map(np, 1);

	/* no second irq in the device tree: one handler for all channels */
	if (!priv->irq[1]) {
		err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
			  dev_driver_string(dev), dev);
	if (err)
		goto primary_out;

	/* second irq covers the remaining channels */
	err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
			  dev_driver_string(dev), dev);
	if (err) {
		dev_err(dev, "failed to request secondary irq\n");
		irq_dispose_mapping(priv->irq[1]);
		priv->irq[1] = 0;
	}

	return err;

primary_out:
	if (err) {
		dev_err(dev, "failed to request primary irq\n");
		irq_dispose_mapping(priv->irq[0]);
		priv->irq[0] = 0;
	}

	return err;
}
3370
3371static int talitos_probe(struct platform_device *ofdev)
3372{
3373 struct device *dev = &ofdev->dev;
3374 struct device_node *np = ofdev->dev.of_node;
3375 struct talitos_private *priv;
3376 int i, err;
3377 int stride;
3378 struct resource *res;
3379
3380 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
3381 if (!priv)
3382 return -ENOMEM;
3383
3384 INIT_LIST_HEAD(&priv->alg_list);
3385
3386 dev_set_drvdata(dev, priv);
3387
3388 priv->ofdev = ofdev;
3389
3390 spin_lock_init(&priv->reg_lock);
3391
3392 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3393 if (!res)
3394 return -ENXIO;
3395 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
3396 if (!priv->reg) {
3397 dev_err(dev, "failed to of_iomap\n");
3398 err = -ENOMEM;
3399 goto err_out;
3400 }
3401
3402
3403 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3404 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3405 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3406 of_property_read_u32(np, "fsl,descriptor-types-mask",
3407 &priv->desc_types);
3408
3409 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3410 !priv->exec_units || !priv->desc_types) {
3411 dev_err(dev, "invalid property data in device tree node\n");
3412 err = -EINVAL;
3413 goto err_out;
3414 }
3415
3416 if (of_device_is_compatible(np, "fsl,sec3.0"))
3417 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3418
3419 if (of_device_is_compatible(np, "fsl,sec2.1"))
3420 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
3421 TALITOS_FTR_SHA224_HWINIT |
3422 TALITOS_FTR_HMAC_OK;
3423
3424 if (of_device_is_compatible(np, "fsl,sec1.0"))
3425 priv->features |= TALITOS_FTR_SEC1;
3426
3427 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3428 priv->reg_deu = priv->reg + TALITOS12_DEU;
3429 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3430 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3431 stride = TALITOS1_CH_STRIDE;
3432 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3433 priv->reg_deu = priv->reg + TALITOS10_DEU;
3434 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3435 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3436 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3437 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3438 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3439 stride = TALITOS1_CH_STRIDE;
3440 } else {
3441 priv->reg_deu = priv->reg + TALITOS2_DEU;
3442 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3443 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3444 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3445 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3446 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3447 priv->reg_keu = priv->reg + TALITOS2_KEU;
3448 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3449 stride = TALITOS2_CH_STRIDE;
3450 }
3451
3452 err = talitos_probe_irq(ofdev);
3453 if (err)
3454 goto err_out;
3455
3456 if (of_device_is_compatible(np, "fsl,sec1.0")) {
3457 if (priv->num_channels == 1)
3458 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
3459 (unsigned long)dev);
3460 else
3461 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3462 (unsigned long)dev);
3463 } else {
3464 if (priv->irq[1]) {
3465 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3466 (unsigned long)dev);
3467 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3468 (unsigned long)dev);
3469 } else if (priv->num_channels == 1) {
3470 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3471 (unsigned long)dev);
3472 } else {
3473 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3474 (unsigned long)dev);
3475 }
3476 }
3477
3478 priv->chan = devm_kcalloc(dev,
3479 priv->num_channels,
3480 sizeof(struct talitos_channel),
3481 GFP_KERNEL);
3482 if (!priv->chan) {
3483 dev_err(dev, "failed to allocate channel management space\n");
3484 err = -ENOMEM;
3485 goto err_out;
3486 }
3487
3488 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3489
3490 for (i = 0; i < priv->num_channels; i++) {
3491 priv->chan[i].reg = priv->reg + stride * (i + 1);
3492 if (!priv->irq[1] || !(i & 1))
3493 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
3494
3495 spin_lock_init(&priv->chan[i].head_lock);
3496 spin_lock_init(&priv->chan[i].tail_lock);
3497
3498 priv->chan[i].fifo = devm_kcalloc(dev,
3499 priv->fifo_len,
3500 sizeof(struct talitos_request),
3501 GFP_KERNEL);
3502 if (!priv->chan[i].fifo) {
3503 dev_err(dev, "failed to allocate request fifo %d\n", i);
3504 err = -ENOMEM;
3505 goto err_out;
3506 }
3507
3508 atomic_set(&priv->chan[i].submit_count,
3509 -(priv->chfifo_len - 1));
3510 }
3511
3512 dma_set_mask(dev, DMA_BIT_MASK(36));
3513
3514
3515 err = init_device(dev);
3516 if (err) {
3517 dev_err(dev, "failed to initialize device\n");
3518 goto err_out;
3519 }
3520
3521
3522 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3523 err = talitos_register_rng(dev);
3524 if (err) {
3525 dev_err(dev, "failed to register hwrng: %d\n", err);
3526 goto err_out;
3527 } else
3528 dev_info(dev, "hwrng\n");
3529 }
3530
3531
3532 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3533 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3534 struct talitos_crypto_alg *t_alg;
3535 struct crypto_alg *alg = NULL;
3536
3537 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3538 if (IS_ERR(t_alg)) {
3539 err = PTR_ERR(t_alg);
3540 if (err == -ENOTSUPP)
3541 continue;
3542 goto err_out;
3543 }
3544
3545 switch (t_alg->algt.type) {
3546 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3547 err = crypto_register_alg(
3548 &t_alg->algt.alg.crypto);
3549 alg = &t_alg->algt.alg.crypto;
3550 break;
3551
3552 case CRYPTO_ALG_TYPE_AEAD:
3553 err = crypto_register_aead(
3554 &t_alg->algt.alg.aead);
3555 alg = &t_alg->algt.alg.aead.base;
3556 break;
3557
3558 case CRYPTO_ALG_TYPE_AHASH:
3559 err = crypto_register_ahash(
3560 &t_alg->algt.alg.hash);
3561 alg = &t_alg->algt.alg.hash.halg.base;
3562 break;
3563 }
3564 if (err) {
3565 dev_err(dev, "%s alg registration failed\n",
3566 alg->cra_driver_name);
3567 devm_kfree(dev, t_alg);
3568 } else
3569 list_add_tail(&t_alg->entry, &priv->alg_list);
3570 }
3571 }
3572 if (!list_empty(&priv->alg_list))
3573 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3574 (char *)of_get_property(np, "compatible", NULL));
3575
3576 return 0;
3577
3578err_out:
3579 talitos_remove(ofdev);
3580
3581 return err;
3582}
3583
/*
 * Device tree match table: bind to SEC1 and/or SEC2 compatible nodes,
 * depending on which sub-driver support was built in.
 */
static const struct of_device_id talitos_match[] = {
#ifdef CONFIG_CRYPTO_DEV_TALITOS1
	{
		.compatible = "fsl,sec1.0",
	},
#endif
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
	{
		.compatible = "fsl,sec2.0",
	},
#endif
	{},
};
MODULE_DEVICE_TABLE(of, talitos_match);
3598
/* Platform driver glue: probe/remove hooks plus the OF match table. */
static struct platform_driver talitos_driver = {
	.driver = {
		.name = "talitos",
		.of_match_table = talitos_match,
	},
	.probe = talitos_probe,
	.remove = talitos_remove,
};

module_platform_driver(talitos_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");
3613