1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23#include <linux/kernel.h>
24#include <linux/string.h>
25
26#include "util.h"
27#include "spu.h"
28#include "spu2.h"
29
/* Length in bytes of the status field in a SPU2 Tx (request) message */
#define SPU2_TX_STATUS_LEN 0

/* Length in bytes of the status field in a SPU2 Rx (response) message */
#define SPU2_RX_STATUS_LEN 2
37
/*
 * Protocol selector values for the FMD CTRL0 word. A non-zero selector
 * asks the SPU2 to apply full protocol processing (MACsec/IPsec/(D)TLS);
 * SPU2_PROTO_RESV means no protocol offload (raw cipher/hash only).
 */
enum spu2_proto_sel {
	SPU2_PROTO_RESV = 0,
	SPU2_MACSEC_SECTAG8_ECB = 1,
	SPU2_MACSEC_SECTAG8_SCB = 2,
	SPU2_MACSEC_SECTAG16 = 3,
	SPU2_MACSEC_SECTAG16_8_XPN = 4,
	SPU2_IPSEC = 5,
	SPU2_IPSEC_ESN = 6,
	SPU2_TLS_CIPHER = 7,
	SPU2_TLS_AEAD = 8,
	SPU2_DTLS_CIPHER = 9,
	SPU2_DTLS_AEAD = 10
};
51
/*
 * Human-readable names used by the packet-dump helpers below.
 * Each array is indexed by the corresponding spu2_* enum value
 * (see the range checks in spu2_ciph_type_name() etc.).
 */
char *spu2_cipher_type_names[] = { "None", "AES128", "AES192", "AES256",
	"DES", "3DES"
};

char *spu2_cipher_mode_names[] = { "ECB", "CBC", "CTR", "CFB", "OFB", "XTS",
	"CCM", "GCM"
};

char *spu2_hash_type_names[] = { "None", "AES128", "AES192", "AES256",
	"Reserved", "Reserved", "MD5", "SHA1", "SHA224", "SHA256", "SHA384",
	"SHA512", "SHA512/224", "SHA512/256", "SHA3-224", "SHA3-256",
	"SHA3-384", "SHA3-512"
};

char *spu2_hash_mode_names[] = { "CMAC", "CBC-MAC", "XCBC-MAC", "HMAC",
	"Rabin", "CCM", "GCM", "Reserved"
};
69
70static char *spu2_ciph_type_name(enum spu2_cipher_type cipher_type)
71{
72 if (cipher_type >= SPU2_CIPHER_TYPE_LAST)
73 return "Reserved";
74 return spu2_cipher_type_names[cipher_type];
75}
76
77static char *spu2_ciph_mode_name(enum spu2_cipher_mode cipher_mode)
78{
79 if (cipher_mode >= SPU2_CIPHER_MODE_LAST)
80 return "Reserved";
81 return spu2_cipher_mode_names[cipher_mode];
82}
83
84static char *spu2_hash_type_name(enum spu2_hash_type hash_type)
85{
86 if (hash_type >= SPU2_HASH_TYPE_LAST)
87 return "Reserved";
88 return spu2_hash_type_names[hash_type];
89}
90
91static char *spu2_hash_mode_name(enum spu2_hash_mode hash_mode)
92{
93 if (hash_mode >= SPU2_HASH_MODE_LAST)
94 return "Reserved";
95 return spu2_hash_mode_names[hash_mode];
96}
97
98
99
100
101
102static int spu2_cipher_mode_xlate(enum spu_cipher_mode cipher_mode,
103 enum spu2_cipher_mode *spu2_mode)
104{
105 switch (cipher_mode) {
106 case CIPHER_MODE_ECB:
107 *spu2_mode = SPU2_CIPHER_MODE_ECB;
108 break;
109 case CIPHER_MODE_CBC:
110 *spu2_mode = SPU2_CIPHER_MODE_CBC;
111 break;
112 case CIPHER_MODE_OFB:
113 *spu2_mode = SPU2_CIPHER_MODE_OFB;
114 break;
115 case CIPHER_MODE_CFB:
116 *spu2_mode = SPU2_CIPHER_MODE_CFB;
117 break;
118 case CIPHER_MODE_CTR:
119 *spu2_mode = SPU2_CIPHER_MODE_CTR;
120 break;
121 case CIPHER_MODE_CCM:
122 *spu2_mode = SPU2_CIPHER_MODE_CCM;
123 break;
124 case CIPHER_MODE_GCM:
125 *spu2_mode = SPU2_CIPHER_MODE_GCM;
126 break;
127 case CIPHER_MODE_XTS:
128 *spu2_mode = SPU2_CIPHER_MODE_XTS;
129 break;
130 default:
131 return -EINVAL;
132 }
133 return 0;
134}
135
136
137
138
139
140
141
142
143
144
145
146
/*
 * spu2_cipher_xlate() - Translate a generic cipher alg/mode/type triple into
 * the SPU2 type and mode fields used in FMD CTRL0.
 * @cipher_alg:  generic cipher algorithm
 * @cipher_mode: generic cipher mode
 * @cipher_type: generic cipher type (selects the AES key size)
 * @spu2_type:   output: SPU2 cipher type
 * @spu2_mode:   output: SPU2 cipher mode
 *
 * Return: 0 on success, -EINVAL if the alg/mode/type combination is not
 * supported by SPU2.
 */
static int spu2_cipher_xlate(enum spu_cipher_alg cipher_alg,
			     enum spu_cipher_mode cipher_mode,
			     enum spu_cipher_type cipher_type,
			     enum spu2_cipher_type *spu2_type,
			     enum spu2_cipher_mode *spu2_mode)
{
	int err;

	err = spu2_cipher_mode_xlate(cipher_mode, spu2_mode);
	if (err) {
		flow_log("Invalid cipher mode %d\n", cipher_mode);
		return err;
	}

	switch (cipher_alg) {
	case CIPHER_ALG_NONE:
		*spu2_type = SPU2_CIPHER_TYPE_NONE;
		break;
	case CIPHER_ALG_RC4:
		/* RC4 is not supported on SPU2 */
		err = -EINVAL;
		*spu2_type = SPU2_CIPHER_TYPE_NONE;
		break;
	case CIPHER_ALG_DES:
		*spu2_type = SPU2_CIPHER_TYPE_DES;
		break;
	case CIPHER_ALG_3DES:
		*spu2_type = SPU2_CIPHER_TYPE_3DES;
		break;
	case CIPHER_ALG_AES:
		/* For AES, the SPU2 type also encodes the key size */
		switch (cipher_type) {
		case CIPHER_TYPE_AES128:
			*spu2_type = SPU2_CIPHER_TYPE_AES128;
			break;
		case CIPHER_TYPE_AES192:
			*spu2_type = SPU2_CIPHER_TYPE_AES192;
			break;
		case CIPHER_TYPE_AES256:
			*spu2_type = SPU2_CIPHER_TYPE_AES256;
			break;
		default:
			err = -EINVAL;
		}
		break;
	case CIPHER_ALG_LAST:
	default:
		err = -EINVAL;
		break;
	}

	if (err)
		flow_log("Invalid cipher alg %d or type %d\n",
			 cipher_alg, cipher_type);
	return err;
}
202
203
204
205
206
207static int spu2_hash_mode_xlate(enum hash_mode hash_mode,
208 enum spu2_hash_mode *spu2_mode)
209{
210 switch (hash_mode) {
211 case HASH_MODE_XCBC:
212 *spu2_mode = SPU2_HASH_MODE_XCBC_MAC;
213 break;
214 case HASH_MODE_CMAC:
215 *spu2_mode = SPU2_HASH_MODE_CMAC;
216 break;
217 case HASH_MODE_HMAC:
218 *spu2_mode = SPU2_HASH_MODE_HMAC;
219 break;
220 case HASH_MODE_CCM:
221 *spu2_mode = SPU2_HASH_MODE_CCM;
222 break;
223 case HASH_MODE_GCM:
224 *spu2_mode = SPU2_HASH_MODE_GCM;
225 break;
226 default:
227 return -EINVAL;
228 }
229 return 0;
230}
231
232
233
234
235
236
237
238
239
240
241
242
243
/*
 * spu2_hash_xlate() - Translate generic hash parameters into the SPU2 hash
 * type and mode fields used in FMD CTRL0.
 * @hash_alg:  generic hash algorithm
 * @hash_mode: generic hash mode
 * @hash_type: generic hash type (used only for the error log here)
 * @ciph_type: generic cipher type; selects the key size for AES-based MACs
 * @spu2_type: output: SPU2 hash type
 * @spu2_mode: output: SPU2 hash mode
 *
 * Return: 0 on success, -EINVAL if the combination is not supported by SPU2.
 */
static int
spu2_hash_xlate(enum hash_alg hash_alg, enum hash_mode hash_mode,
		enum hash_type hash_type, enum spu_cipher_type ciph_type,
		enum spu2_hash_type *spu2_type, enum spu2_hash_mode *spu2_mode)
{
	int err;

	err = spu2_hash_mode_xlate(hash_mode, spu2_mode);
	if (err) {
		flow_log("Invalid hash mode %d\n", hash_mode);
		return err;
	}

	switch (hash_alg) {
	case HASH_ALG_NONE:
		*spu2_type = SPU2_HASH_TYPE_NONE;
		break;
	case HASH_ALG_MD5:
		*spu2_type = SPU2_HASH_TYPE_MD5;
		break;
	case HASH_ALG_SHA1:
		*spu2_type = SPU2_HASH_TYPE_SHA1;
		break;
	case HASH_ALG_SHA224:
		*spu2_type = SPU2_HASH_TYPE_SHA224;
		break;
	case HASH_ALG_SHA256:
		*spu2_type = SPU2_HASH_TYPE_SHA256;
		break;
	case HASH_ALG_SHA384:
		*spu2_type = SPU2_HASH_TYPE_SHA384;
		break;
	case HASH_ALG_SHA512:
		*spu2_type = SPU2_HASH_TYPE_SHA512;
		break;
	case HASH_ALG_AES:
		/* AES-based MAC: the hash type encodes the AES key size */
		switch (ciph_type) {
		case CIPHER_TYPE_AES128:
			*spu2_type = SPU2_HASH_TYPE_AES128;
			break;
		case CIPHER_TYPE_AES192:
			*spu2_type = SPU2_HASH_TYPE_AES192;
			break;
		case CIPHER_TYPE_AES256:
			*spu2_type = SPU2_HASH_TYPE_AES256;
			break;
		default:
			err = -EINVAL;
		}
		break;
	case HASH_ALG_SHA3_224:
		*spu2_type = SPU2_HASH_TYPE_SHA3_224;
		break;
	case HASH_ALG_SHA3_256:
		*spu2_type = SPU2_HASH_TYPE_SHA3_256;
		break;
	case HASH_ALG_SHA3_384:
		*spu2_type = SPU2_HASH_TYPE_SHA3_384;
		break;
	case HASH_ALG_SHA3_512:
		*spu2_type = SPU2_HASH_TYPE_SHA3_512;
		break;
	case HASH_ALG_LAST:
	default:
		err = -EINVAL;
		break;
	}

	if (err)
		flow_log("Invalid hash alg %d or type %d\n",
			 hash_alg, hash_type);
	return err;
}
317
318
/* Dump (via packet_log) the decoded fields of the FMD CTRL0 word */
static void spu2_dump_fmd_ctrl0(u64 ctrl0)
{
	enum spu2_cipher_type ciph_type;
	enum spu2_cipher_mode ciph_mode;
	enum spu2_hash_type hash_type;
	enum spu2_hash_mode hash_mode;
	char *ciph_name;
	char *ciph_mode_name;
	char *hash_name;
	char *hash_mode_name;
	u8 cfb;
	u8 proto;

	packet_log(" FMD CTRL0 %#16llx\n", ctrl0);
	if (ctrl0 & SPU2_CIPH_ENCRYPT_EN)
		packet_log(" encrypt\n");
	else
		packet_log(" decrypt\n");

	ciph_type = (ctrl0 & SPU2_CIPH_TYPE) >> SPU2_CIPH_TYPE_SHIFT;
	ciph_name = spu2_ciph_type_name(ciph_type);
	packet_log(" Cipher type: %s\n", ciph_name);

	/* Cipher mode is only meaningful when a cipher is configured */
	if (ciph_type != SPU2_CIPHER_TYPE_NONE) {
		ciph_mode = (ctrl0 & SPU2_CIPH_MODE) >> SPU2_CIPH_MODE_SHIFT;
		ciph_mode_name = spu2_ciph_mode_name(ciph_mode);
		packet_log(" Cipher mode: %s\n", ciph_mode_name);
	}

	cfb = (ctrl0 & SPU2_CFB_MASK) >> SPU2_CFB_MASK_SHIFT;
	packet_log(" CFB %#x\n", cfb);

	proto = (ctrl0 & SPU2_PROTO_SEL) >> SPU2_PROTO_SEL_SHIFT;
	packet_log(" protocol %#x\n", proto);

	if (ctrl0 & SPU2_HASH_FIRST)
		packet_log(" hash first\n");
	else
		packet_log(" cipher first\n");

	if (ctrl0 & SPU2_CHK_TAG)
		packet_log(" check tag\n");

	hash_type = (ctrl0 & SPU2_HASH_TYPE) >> SPU2_HASH_TYPE_SHIFT;
	hash_name = spu2_hash_type_name(hash_type);
	packet_log(" Hash type: %s\n", hash_name);

	/* Hash mode is only meaningful when a hash is configured */
	if (hash_type != SPU2_HASH_TYPE_NONE) {
		hash_mode = (ctrl0 & SPU2_HASH_MODE) >> SPU2_HASH_MODE_SHIFT;
		hash_mode_name = spu2_hash_mode_name(hash_mode);
		packet_log(" Hash mode: %s\n", hash_mode_name);
	}

	if (ctrl0 & SPU2_CIPH_PAD_EN) {
		packet_log(" Cipher pad: %#2llx\n",
			   (ctrl0 & SPU2_CIPH_PAD) >> SPU2_CIPH_PAD_SHIFT);
	}
}
377
378
/* Dump (via packet_log) the decoded fields of the FMD CTRL1 word */
static void spu2_dump_fmd_ctrl1(u64 ctrl1)
{
	u8 hash_key_len;
	u8 ciph_key_len;
	u8 ret_iv_len;
	u8 iv_offset;
	u8 iv_len;
	u8 hash_tag_len;
	u8 ret_md;

	packet_log(" FMD CTRL1 %#16llx\n", ctrl1);
	if (ctrl1 & SPU2_TAG_LOC)
		packet_log(" Tag after payload\n");

	/* Which optional sections the request message carries */
	packet_log(" Msg includes ");
	if (ctrl1 & SPU2_HAS_FR_DATA)
		packet_log("FD ");
	if (ctrl1 & SPU2_HAS_AAD1)
		packet_log("AAD1 ");
	if (ctrl1 & SPU2_HAS_NAAD)
		packet_log("NAAD ");
	if (ctrl1 & SPU2_HAS_AAD2)
		packet_log("AAD2 ");
	if (ctrl1 & SPU2_HAS_ESN)
		packet_log("ESN ");
	packet_log("\n");

	hash_key_len = (ctrl1 & SPU2_HASH_KEY_LEN) >> SPU2_HASH_KEY_LEN_SHIFT;
	packet_log(" Hash key len %u\n", hash_key_len);

	ciph_key_len = (ctrl1 & SPU2_CIPH_KEY_LEN) >> SPU2_CIPH_KEY_LEN_SHIFT;
	packet_log(" Cipher key len %u\n", ciph_key_len);

	if (ctrl1 & SPU2_GENIV)
		packet_log(" Generate IV\n");

	if (ctrl1 & SPU2_HASH_IV)
		packet_log(" IV included in hash\n");

	if (ctrl1 & SPU2_RET_IV)
		packet_log(" Return IV in output before payload\n");

	/* A ret_iv_len field of 0 means the full 16-byte IV is returned */
	ret_iv_len = (ctrl1 & SPU2_RET_IV_LEN) >> SPU2_RET_IV_LEN_SHIFT;
	packet_log(" Length of returned IV %u bytes\n",
		   ret_iv_len ? ret_iv_len : 16);

	iv_offset = (ctrl1 & SPU2_IV_OFFSET) >> SPU2_IV_OFFSET_SHIFT;
	packet_log(" IV offset %u\n", iv_offset);

	iv_len = (ctrl1 & SPU2_IV_LEN) >> SPU2_IV_LEN_SHIFT;
	packet_log(" Input IV len %u bytes\n", iv_len);

	hash_tag_len = (ctrl1 & SPU2_HASH_TAG_LEN) >> SPU2_HASH_TAG_LEN_SHIFT;
	packet_log(" Hash tag length %u bytes\n", hash_tag_len);

	/* Which sections the hardware returns in the response message */
	packet_log(" Return ");
	ret_md = (ctrl1 & SPU2_RETURN_MD) >> SPU2_RETURN_MD_SHIFT;
	if (ret_md)
		packet_log("FMD ");
	if (ret_md == SPU2_RET_FMD_OMD)
		packet_log("OMD ");
	else if (ret_md == SPU2_RET_FMD_OMD_IV)
		packet_log("OMD IV ");
	if (ctrl1 & SPU2_RETURN_FD)
		packet_log("FD ");
	if (ctrl1 & SPU2_RETURN_AAD1)
		packet_log("AAD1 ");
	if (ctrl1 & SPU2_RETURN_NAAD)
		packet_log("NAAD ");
	if (ctrl1 & SPU2_RETURN_AAD2)
		packet_log("AAD2 ");
	if (ctrl1 & SPU2_RETURN_PAY)
		packet_log("Payload");
	packet_log("\n");
}
454
455
456static void spu2_dump_fmd_ctrl2(u64 ctrl2)
457{
458 packet_log(" FMD CTRL2 %#16llx\n", ctrl2);
459
460 packet_log(" AAD1 offset %llu length %llu bytes\n",
461 ctrl2 & SPU2_AAD1_OFFSET,
462 (ctrl2 & SPU2_AAD1_LEN) >> SPU2_AAD1_LEN_SHIFT);
463 packet_log(" AAD2 offset %llu\n",
464 (ctrl2 & SPU2_AAD2_OFFSET) >> SPU2_AAD2_OFFSET_SHIFT);
465 packet_log(" Payload offset %llu\n",
466 (ctrl2 & SPU2_PL_OFFSET) >> SPU2_PL_OFFSET_SHIFT);
467}
468
469
470static void spu2_dump_fmd_ctrl3(u64 ctrl3)
471{
472 packet_log(" FMD CTRL3 %#16llx\n", ctrl3);
473
474 packet_log(" Payload length %llu bytes\n", ctrl3 & SPU2_PL_LEN);
475 packet_log(" TLS length %llu bytes\n",
476 (ctrl3 & SPU2_TLS_LEN) >> SPU2_TLS_LEN_SHIFT);
477}
478
/* Dump all four fixed metadata (FMD) control words of a SPU2 message header */
static void spu2_dump_fmd(struct SPU2_FMD *fmd)
{
	spu2_dump_fmd_ctrl0(le64_to_cpu(fmd->ctrl0));
	spu2_dump_fmd_ctrl1(le64_to_cpu(fmd->ctrl1));
	spu2_dump_fmd_ctrl2(le64_to_cpu(fmd->ctrl2));
	spu2_dump_fmd_ctrl3(le64_to_cpu(fmd->ctrl3));
}
486
487static void spu2_dump_omd(u8 *omd, u16 hash_key_len, u16 ciph_key_len,
488 u16 hash_iv_len, u16 ciph_iv_len)
489{
490 u8 *ptr = omd;
491
492 packet_log(" OMD:\n");
493
494 if (hash_key_len) {
495 packet_log(" Hash Key Length %u bytes\n", hash_key_len);
496 packet_dump(" KEY: ", ptr, hash_key_len);
497 ptr += hash_key_len;
498 }
499
500 if (ciph_key_len) {
501 packet_log(" Cipher Key Length %u bytes\n", ciph_key_len);
502 packet_dump(" KEY: ", ptr, ciph_key_len);
503 ptr += ciph_key_len;
504 }
505
506 if (hash_iv_len) {
507 packet_log(" Hash IV Length %u bytes\n", hash_iv_len);
508 packet_dump(" hash IV: ", ptr, hash_iv_len);
509 ptr += ciph_key_len;
510 }
511
512 if (ciph_iv_len) {
513 packet_log(" Cipher IV Length %u bytes\n", ciph_iv_len);
514 packet_dump(" cipher IV: ", ptr, ciph_iv_len);
515 }
516}
517
518
/*
 * spu2_dump_msg_hdr() - Dump a complete SPU2 request message header: the
 * FMD control words followed by the OMD (keys and IVs).
 * @buf:     start of the message header (the FMD)
 * @buf_len: total header length in bytes, used as a consistency check
 */
void spu2_dump_msg_hdr(u8 *buf, unsigned int buf_len)
{
	struct SPU2_FMD *fmd = (struct SPU2_FMD *)buf;
	u8 *omd;
	u64 ctrl1;
	u16 hash_key_len;
	u16 ciph_key_len;
	u16 hash_iv_len;
	u16 ciph_iv_len;
	u16 omd_len;

	packet_log("\n");
	packet_log("SPU2 message header %p len: %u\n", buf, buf_len);

	spu2_dump_fmd(fmd);
	/* OMD immediately follows the fixed metadata */
	omd = (u8 *)(fmd + 1);

	/* Recover the OMD section lengths from CTRL1 */
	ctrl1 = le64_to_cpu(fmd->ctrl1);
	hash_key_len = (ctrl1 & SPU2_HASH_KEY_LEN) >> SPU2_HASH_KEY_LEN_SHIFT;
	ciph_key_len = (ctrl1 & SPU2_CIPH_KEY_LEN) >> SPU2_CIPH_KEY_LEN_SHIFT;
	/* This driver never sends a hash IV in the OMD */
	hash_iv_len = 0;
	ciph_iv_len = (ctrl1 & SPU2_IV_LEN) >> SPU2_IV_LEN_SHIFT;
	spu2_dump_omd(omd, hash_key_len, ciph_key_len, hash_iv_len,
		      ciph_iv_len);

	/* Sanity check: the FMD plus OMD should account for the whole header */
	omd_len = hash_key_len + ciph_key_len + hash_iv_len + ciph_iv_len;
	if (FMD_SIZE + omd_len != buf_len) {
		packet_log
		    (" Packet parsed incorrectly. buf_len %u, sum of MD %zu\n",
		     buf_len, FMD_SIZE + omd_len);
	}
	packet_log("\n");
}
553
554
555
556
557
558
559
560
561
562
563
564static int spu2_fmd_init(struct SPU2_FMD *fmd,
565 enum spu2_cipher_type spu2_type,
566 enum spu2_cipher_mode spu2_mode,
567 u32 cipher_key_len, u32 cipher_iv_len)
568{
569 u64 ctrl0;
570 u64 ctrl1;
571 u64 ctrl2;
572 u64 ctrl3;
573 u32 aad1_offset;
574 u32 aad2_offset;
575 u16 aad1_len = 0;
576 u64 payload_offset;
577
578 ctrl0 = (spu2_type << SPU2_CIPH_TYPE_SHIFT) |
579 (spu2_mode << SPU2_CIPH_MODE_SHIFT);
580
581 ctrl1 = (cipher_key_len << SPU2_CIPH_KEY_LEN_SHIFT) |
582 ((u64)cipher_iv_len << SPU2_IV_LEN_SHIFT) |
583 ((u64)SPU2_RET_FMD_ONLY << SPU2_RETURN_MD_SHIFT) | SPU2_RETURN_PAY;
584
585
586
587
588
589 aad1_offset = 0;
590 aad2_offset = aad1_offset;
591 payload_offset = 0;
592 ctrl2 = aad1_offset |
593 (aad1_len << SPU2_AAD1_LEN_SHIFT) |
594 (aad2_offset << SPU2_AAD2_OFFSET_SHIFT) |
595 (payload_offset << SPU2_PL_OFFSET_SHIFT);
596
597 ctrl3 = 0;
598
599 fmd->ctrl0 = cpu_to_le64(ctrl0);
600 fmd->ctrl1 = cpu_to_le64(ctrl1);
601 fmd->ctrl2 = cpu_to_le64(ctrl2);
602 fmd->ctrl3 = cpu_to_le64(ctrl3);
603
604 return 0;
605}
606
607
608
609
610
611
612
613
614
615
616
617
618
619static void spu2_fmd_ctrl0_write(struct SPU2_FMD *fmd,
620 bool is_inbound, bool auth_first,
621 enum spu2_proto_sel protocol,
622 enum spu2_cipher_type cipher_type,
623 enum spu2_cipher_mode cipher_mode,
624 enum spu2_hash_type auth_type,
625 enum spu2_hash_mode auth_mode)
626{
627 u64 ctrl0 = 0;
628
629 if ((cipher_type != SPU2_CIPHER_TYPE_NONE) && !is_inbound)
630 ctrl0 |= SPU2_CIPH_ENCRYPT_EN;
631
632 ctrl0 |= ((u64)cipher_type << SPU2_CIPH_TYPE_SHIFT) |
633 ((u64)cipher_mode << SPU2_CIPH_MODE_SHIFT);
634
635 if (protocol)
636 ctrl0 |= (u64)protocol << SPU2_PROTO_SEL_SHIFT;
637
638 if (auth_first)
639 ctrl0 |= SPU2_HASH_FIRST;
640
641 if (is_inbound && (auth_type != SPU2_HASH_TYPE_NONE))
642 ctrl0 |= SPU2_CHK_TAG;
643
644 ctrl0 |= (((u64)auth_type << SPU2_HASH_TYPE_SHIFT) |
645 ((u64)auth_mode << SPU2_HASH_MODE_SHIFT));
646
647 fmd->ctrl0 = cpu_to_le64(ctrl0);
648}
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
/*
 * spu2_fmd_ctrl1_write() - Assemble and store (little-endian) the FMD CTRL1
 * word: message layout flags, key/IV lengths, IV handling, digest size, and
 * which sections to return in the response.
 * @fmd:            FMD to update
 * @is_inbound:     true for decrypt/verify requests
 * @assoc_size:     length of associated data (AAD2), in bytes
 * @auth_key_len:   hash key length, in bytes
 * @cipher_key_len: cipher key length, in bytes
 * @gen_iv:         ask hardware to generate the IV
 * @hash_iv:        include the IV in the hash
 * @return_iv:      return the IV in the response before the payload
 * @ret_iv_len:     length of IV to return
 * @ret_iv_offset:  offset of returned IV
 * @cipher_iv_len:  input cipher IV length, in bytes
 * @digest_size:    auth tag/digest length, in bytes
 * @return_payload: return the payload in the response
 * @return_md:      return the fixed metadata in the response
 */
static void spu2_fmd_ctrl1_write(struct SPU2_FMD *fmd, bool is_inbound,
				 u64 assoc_size,
				 u64 auth_key_len, u64 cipher_key_len,
				 bool gen_iv, bool hash_iv, bool return_iv,
				 u64 ret_iv_len, u64 ret_iv_offset,
				 u64 cipher_iv_len, u64 digest_size,
				 bool return_payload, bool return_md)
{
	u64 ctrl1 = 0;

	/* Inbound requests carry the tag after the payload */
	if (is_inbound && digest_size)
		ctrl1 |= SPU2_TAG_LOC;

	/* Associated data travels (and is returned) as the AAD2 section */
	if (assoc_size) {
		ctrl1 |= SPU2_HAS_AAD2;
		ctrl1 |= SPU2_RETURN_AAD2;
	}

	if (auth_key_len)
		ctrl1 |= ((auth_key_len << SPU2_HASH_KEY_LEN_SHIFT) &
			  SPU2_HASH_KEY_LEN);

	if (cipher_key_len)
		ctrl1 |= ((cipher_key_len << SPU2_CIPH_KEY_LEN_SHIFT) &
			  SPU2_CIPH_KEY_LEN);

	if (gen_iv)
		ctrl1 |= SPU2_GENIV;

	if (hash_iv)
		ctrl1 |= SPU2_HASH_IV;

	if (return_iv) {
		ctrl1 |= SPU2_RET_IV;
		ctrl1 |= ret_iv_len << SPU2_RET_IV_LEN_SHIFT;
		ctrl1 |= ret_iv_offset << SPU2_IV_OFFSET_SHIFT;
	}

	ctrl1 |= ((cipher_iv_len << SPU2_IV_LEN_SHIFT) & SPU2_IV_LEN);

	if (digest_size)
		ctrl1 |= ((digest_size << SPU2_HASH_TAG_LEN_SHIFT) &
			  SPU2_HASH_TAG_LEN);

	/* Select how much metadata the hardware echoes back */
	if (return_md)
		ctrl1 |= ((u64)SPU2_RET_FMD_ONLY << SPU2_RETURN_MD_SHIFT);
	else
		ctrl1 |= ((u64)SPU2_RET_NO_MD << SPU2_RETURN_MD_SHIFT);

	if (return_payload)
		ctrl1 |= SPU2_RETURN_PAY;

	fmd->ctrl1 = cpu_to_le64(ctrl1);
}
729
730
731
732
733
734
735
736
737
738
739
740
741static void spu2_fmd_ctrl2_write(struct SPU2_FMD *fmd, u64 cipher_offset,
742 u64 auth_key_len, u64 auth_iv_len,
743 u64 cipher_key_len, u64 cipher_iv_len)
744{
745 u64 ctrl2;
746 u64 aad1_offset;
747 u64 aad2_offset;
748 u16 aad1_len = 0;
749 u64 payload_offset;
750
751
752 aad1_offset = 0;
753
754 aad2_offset = aad1_offset;
755 payload_offset = cipher_offset;
756 ctrl2 = aad1_offset |
757 (aad1_len << SPU2_AAD1_LEN_SHIFT) |
758 (aad2_offset << SPU2_AAD2_OFFSET_SHIFT) |
759 (payload_offset << SPU2_PL_OFFSET_SHIFT);
760
761 fmd->ctrl2 = cpu_to_le64(ctrl2);
762}
763
764
765
766
767
768
769static void spu2_fmd_ctrl3_write(struct SPU2_FMD *fmd, u64 payload_len)
770{
771 u64 ctrl3;
772
773 ctrl3 = payload_len & SPU2_PL_LEN;
774
775 fmd->ctrl3 = cpu_to_le64(ctrl3);
776}
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792u32 spu2_ctx_max_payload(enum spu_cipher_alg cipher_alg,
793 enum spu_cipher_mode cipher_mode,
794 unsigned int blocksize)
795{
796 if ((cipher_alg == CIPHER_ALG_AES) &&
797 (cipher_mode == CIPHER_MODE_CCM)) {
798 u32 excess = SPU2_MAX_PAYLOAD % blocksize;
799
800 return SPU2_MAX_PAYLOAD - excess;
801 } else {
802 return SPU_MAX_PAYLOAD_INF;
803 }
804}
805
806
807
808
809
810
811
812
813u32 spu2_payload_length(u8 *spu_hdr)
814{
815 struct SPU2_FMD *fmd = (struct SPU2_FMD *)spu_hdr;
816 u32 pl_len;
817 u64 ctrl3;
818
819 ctrl3 = le64_to_cpu(fmd->ctrl3);
820 pl_len = ctrl3 & SPU2_PL_LEN;
821
822 return pl_len;
823}
824
825
826
827
828
829
830
831
832
833
834
/*
 * spu2_response_hdr_len() - Length of the SPU2 response message header.
 * All parameters are unused: a SPU2 response header is always just the FMD.
 *
 * Return: FMD_SIZE.
 */
u16 spu2_response_hdr_len(u16 auth_key_len, u16 enc_key_len, bool is_hash)
{
	return FMD_SIZE;
}
839
840
841
842
843
844
845
846
847
848
849
850
851
/*
 * spu2_hash_pad_len() - Hash padding required by the driver for SPU2.
 * All parameters are unused: SPU2 hardware does not require the driver
 * to supply hash padding.
 *
 * Return: 0.
 */
u16 spu2_hash_pad_len(enum hash_alg hash_alg, enum hash_mode hash_mode,
		      u32 chunksize, u16 hash_block_size)
{
	return 0;
}
857
858
859
860
861
862
863
/*
 * spu2_gcm_ccm_pad_len() - GCM/CCM padding required by the driver for SPU2.
 * Parameters are unused: SPU2 does not require driver-supplied GCM/CCM
 * padding.
 *
 * Return: 0.
 */
u32 spu2_gcm_ccm_pad_len(enum spu_cipher_mode cipher_mode,
			 unsigned int data_size)
{
	return 0;
}
869
870
871
872
873
874
875
876
877
878
879
880u32 spu2_assoc_resp_len(enum spu_cipher_mode cipher_mode,
881 unsigned int assoc_len, unsigned int iv_len,
882 bool is_encrypt)
883{
884 u32 resp_len = assoc_len;
885
886 if (is_encrypt)
887
888 resp_len += iv_len;
889 return resp_len;
890}
891
892
893
894
895
896
897
898
899
900
901
902
/*
 * spu2_aead_ivlen() - Length of the IV field carried in the AEAD data
 * section for SPU2. Parameters are unused: SPU2 takes the IV through the
 * OMD instead, so nothing is placed in the data section.
 *
 * Return: 0.
 */
u8 spu2_aead_ivlen(enum spu_cipher_mode cipher_mode, u16 iv_len)
{
	return 0;
}
907
908
909
910
911
912
913
914
/*
 * spu2_hash_type() - Hash type for a request, given bytes already sent.
 * @src_sent: unused; SPU2 always performs a full hash, never init/update/
 * final incremental hashing.
 *
 * Return: HASH_TYPE_FULL.
 */
enum hash_type spu2_hash_type(u32 src_sent)
{
	return HASH_TYPE_FULL;
}
919
920
921
922
923
924
925
926
927
/*
 * spu2_digest_size() - Digest size for a request.
 * @alg_digest_size: digest size of the algorithm, in bytes
 * @alg:             unused
 * @htype:           unused; SPU2 only does full hashes (see spu2_hash_type())
 *
 * Return: alg_digest_size unchanged.
 */
u32 spu2_digest_size(u32 alg_digest_size, enum hash_alg alg,
		     enum hash_type htype)
{
	return alg_digest_size;
}
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
/*
 * spu2_create_request() - Build a SPU2 request message header: the FMD
 * control words followed by the OMD (hash key, cipher key, cipher IV).
 * @spu_hdr:      buffer to receive the header
 * @req_opts:     request options (direction, AEAD/ESP flags, ordering);
 *                note auth_first may be overwritten for AES-GCM/CCM
 * @cipher_parms: cipher parameters; key_len/key_buf may be modified for
 *                GCM-hash-only handling (see below)
 * @hash_parms:   hash parameters; key_len/key_buf may be modified likewise
 * @aead_parms:   AEAD parameters (assoc data, IV, padding lengths)
 * @data_size:    length of the data to be ciphered/hashed, in bytes
 *
 * Return: the header length written, in bytes, or 0 on error.
 */
u32 spu2_create_request(u8 *spu_hdr,
			struct spu_request_opts *req_opts,
			struct spu_cipher_parms *cipher_parms,
			struct spu_hash_parms *hash_parms,
			struct spu_aead_parms *aead_parms,
			unsigned int data_size)
{
	struct SPU2_FMD *fmd;
	u8 *ptr;
	unsigned int buf_len;
	int err;
	enum spu2_cipher_type spu2_ciph_type = SPU2_CIPHER_TYPE_NONE;
	enum spu2_cipher_mode spu2_ciph_mode;
	enum spu2_hash_type spu2_auth_type = SPU2_HASH_TYPE_NONE;
	enum spu2_hash_mode spu2_auth_mode;
	bool return_md = true;
	enum spu2_proto_sel proto = SPU2_PROTO_RESV;

	/*
	 * Size of the ciphered/hashed payload. On inbound AEAD, the trailing
	 * digest is stripped from the count.
	 */
	unsigned int payload_len =
		hash_parms->prebuf_len + data_size + hash_parms->pad_len -
		((req_opts->is_aead && req_opts->is_inbound) ?
		 hash_parms->digestsize : 0);

	/* Offset of the ciphered data: past assoc data, its pad, and the IV */
	unsigned int cipher_offset = aead_parms->assoc_size +
		aead_parms->aad_pad_len + aead_parms->iv_len;

#ifdef DEBUG
	/* Only for the flow_log below; flow_log compiles out when !DEBUG */
	unsigned int real_db_size = spu_real_db_size(aead_parms->assoc_size,
						     aead_parms->iv_len,
						     hash_parms->prebuf_len,
						     data_size,
						     aead_parms->aad_pad_len,
						     aead_parms->data_pad_len,
						     hash_parms->pad_len);
#endif
	unsigned int assoc_size = aead_parms->assoc_size;

	/* AES-GCM: hash first on decrypt, cipher first on encrypt */
	if (req_opts->is_aead &&
	    (cipher_parms->alg == CIPHER_ALG_AES) &&
	    (cipher_parms->mode == CIPHER_MODE_GCM))
		req_opts->auth_first = req_opts->is_inbound;

	/* AES-CCM is the opposite: cipher first on decrypt */
	if (req_opts->is_aead &&
	    (cipher_parms->alg == CIPHER_ALG_AES) &&
	    (cipher_parms->mode == CIPHER_MODE_CCM))
		req_opts->auth_first = !req_opts->is_inbound;

	flow_log("%s()\n", __func__);
	flow_log(" in:%u authFirst:%u\n",
		 req_opts->is_inbound, req_opts->auth_first);
	flow_log(" cipher alg:%u mode:%u type %u\n", cipher_parms->alg,
		 cipher_parms->mode, cipher_parms->type);
	flow_log(" is_esp: %s\n", req_opts->is_esp ? "yes" : "no");
	flow_log(" key: %d\n", cipher_parms->key_len);
	flow_dump(" key: ", cipher_parms->key_buf, cipher_parms->key_len);
	flow_log(" iv: %d\n", cipher_parms->iv_len);
	flow_dump(" iv: ", cipher_parms->iv_buf, cipher_parms->iv_len);
	flow_log(" auth alg:%u mode:%u type %u\n",
		 hash_parms->alg, hash_parms->mode, hash_parms->type);
	flow_log(" digestsize: %u\n", hash_parms->digestsize);
	flow_log(" authkey: %d\n", hash_parms->key_len);
	flow_dump(" authkey: ", hash_parms->key_buf, hash_parms->key_len);
	flow_log(" assoc_size:%u\n", assoc_size);
	flow_log(" prebuf_len:%u\n", hash_parms->prebuf_len);
	flow_log(" data_size:%u\n", data_size);
	flow_log(" hash_pad_len:%u\n", hash_parms->pad_len);
	flow_log(" real_db_size:%u\n", real_db_size);
	flow_log(" cipher_offset:%u payload_len:%u\n",
		 cipher_offset, payload_len);
	flow_log(" aead_iv: %u\n", aead_parms->iv_len);

	/* Translate generic cipher parameters to SPU2 encodings */
	err = spu2_cipher_xlate(cipher_parms->alg, cipher_parms->mode,
				cipher_parms->type,
				&spu2_ciph_type, &spu2_ciph_mode);

	/*
	 * GCM hashing only (rfc4543, or GCM with AAD but zero payload):
	 * reconfigure to use the hash key instead of the cipher key and move
	 * the data into the payload.
	 * NOTE(review): spu2_ciph_mode is read here before err is checked;
	 * if the mode translation failed it is uninitialized — confirm
	 * whether the err check below should be moved above this block.
	 */
	if ((req_opts->is_rfc4543) ||
	    ((spu2_ciph_mode == SPU2_CIPHER_MODE_GCM) &&
	     (payload_len == 0))) {
		/* Use hashing (only) and set up the hash key */
		spu2_ciph_type = SPU2_CIPHER_TYPE_NONE;
		hash_parms->key_len = cipher_parms->key_len;
		memcpy(hash_parms->key_buf, cipher_parms->key_buf,
		       cipher_parms->key_len);
		cipher_parms->key_len = 0;

		if (req_opts->is_rfc4543)
			payload_len += assoc_size;
		else
			payload_len = assoc_size;
		cipher_offset = 0;
		assoc_size = 0;
	}

	if (err)
		return 0;

	flow_log("spu2 cipher type %s, cipher mode %s\n",
		 spu2_ciph_type_name(spu2_ciph_type),
		 spu2_ciph_mode_name(spu2_ciph_mode));

	/* Translate generic hash parameters to SPU2 encodings */
	err = spu2_hash_xlate(hash_parms->alg, hash_parms->mode,
			      hash_parms->type,
			      cipher_parms->type,
			      &spu2_auth_type, &spu2_auth_mode);
	if (err)
		return 0;

	flow_log("spu2 hash type %s, hash mode %s\n",
		 spu2_hash_type_name(spu2_auth_type),
		 spu2_hash_mode_name(spu2_auth_mode));

	fmd = (struct SPU2_FMD *)spu_hdr;

	spu2_fmd_ctrl0_write(fmd, req_opts->is_inbound, req_opts->auth_first,
			     proto, spu2_ciph_type, spu2_ciph_mode,
			     spu2_auth_type, spu2_auth_mode);

	spu2_fmd_ctrl1_write(fmd, req_opts->is_inbound, assoc_size,
			     hash_parms->key_len, cipher_parms->key_len,
			     false, false,
			     aead_parms->return_iv, aead_parms->ret_iv_len,
			     aead_parms->ret_iv_off,
			     cipher_parms->iv_len, hash_parms->digestsize,
			     !req_opts->bd_suppress, return_md);

	spu2_fmd_ctrl2_write(fmd, cipher_offset, hash_parms->key_len, 0,
			     cipher_parms->key_len, cipher_parms->iv_len);

	spu2_fmd_ctrl3_write(fmd, payload_len);

	/* Append the OMD: hash key, then cipher key, then cipher IV */
	ptr = (u8 *)(fmd + 1);
	buf_len = sizeof(struct SPU2_FMD);

	if (hash_parms->key_len) {
		memcpy(ptr, hash_parms->key_buf, hash_parms->key_len);
		ptr += hash_parms->key_len;
		buf_len += hash_parms->key_len;
	}
	if (cipher_parms->key_len) {
		memcpy(ptr, cipher_parms->key_buf, cipher_parms->key_len);
		ptr += cipher_parms->key_len;
		buf_len += cipher_parms->key_len;
	}
	if (cipher_parms->iv_len) {
		memcpy(ptr, cipher_parms->iv_buf, cipher_parms->iv_len);
		ptr += cipher_parms->iv_len;
		buf_len += cipher_parms->iv_len;
	}

	packet_dump(" SPU request header: ", spu_hdr, buf_len);

	return buf_len;
}
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
/*
 * spu2_cipher_req_init() - Build a template SPU2 request header for a
 * cipher-only (skcipher) operation: FMD plus the cipher key in the OMD.
 * The IV and payload length are filled in later by
 * spu2_cipher_req_finish().
 * @spu_hdr:      buffer to receive the header
 * @cipher_parms: cipher algorithm, mode, type, key, and IV length
 *
 * Return: the header length in bytes (FMD + key + IV), or 0 on error.
 */
u16 spu2_cipher_req_init(u8 *spu_hdr, struct spu_cipher_parms *cipher_parms)
{
	struct SPU2_FMD *fmd;
	u8 *omd;
	enum spu2_cipher_type spu2_type = SPU2_CIPHER_TYPE_NONE;
	enum spu2_cipher_mode spu2_mode;
	int err;

	flow_log("%s()\n", __func__);
	flow_log(" cipher alg:%u mode:%u type %u\n", cipher_parms->alg,
		 cipher_parms->mode, cipher_parms->type);
	flow_log(" cipher_iv_len: %u\n", cipher_parms->iv_len);
	flow_log(" key: %d\n", cipher_parms->key_len);
	flow_dump(" key: ", cipher_parms->key_buf, cipher_parms->key_len);

	/* Translate generic cipher parameters to SPU2 encodings */
	err = spu2_cipher_xlate(cipher_parms->alg, cipher_parms->mode,
				cipher_parms->type, &spu2_type, &spu2_mode);
	if (err)
		return 0;

	flow_log("spu2 cipher type %s, cipher mode %s\n",
		 spu2_ciph_type_name(spu2_type),
		 spu2_ciph_mode_name(spu2_mode));

	/* Fill in the fixed metadata */
	fmd = (struct SPU2_FMD *)spu_hdr;
	err = spu2_fmd_init(fmd, spu2_type, spu2_mode, cipher_parms->key_len,
			    cipher_parms->iv_len);
	if (err)
		return 0;

	/* The cipher key is the first (and only) OMD entry at this point */
	omd = (u8 *)(fmd + 1);
	if (cipher_parms->key_buf && cipher_parms->key_len)
		memcpy(omd, cipher_parms->key_buf, cipher_parms->key_len);

	packet_dump(" SPU request header: ", spu_hdr,
		    FMD_SIZE + cipher_parms->key_len + cipher_parms->iv_len);

	return FMD_SIZE + cipher_parms->key_len + cipher_parms->iv_len;
}
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
/*
 * spu2_cipher_req_finish() - Finalize a template header built by
 * spu2_cipher_req_init() for one chunk: set the cipher direction, copy in
 * the IV, and set the payload length.
 * @spu_hdr:         header previously built by spu2_cipher_req_init()
 * @spu_req_hdr_len: total header length, for the packet dump
 * @is_inbound:      non-zero for decrypt
 * @cipher_parms:    cipher key length, IV buffer and length
 * @update_key:      only controls key logging here; the key itself was
 *                   written by spu2_cipher_req_init()
 * @data_size:       payload length for this chunk, in bytes
 */
void spu2_cipher_req_finish(u8 *spu_hdr,
			    u16 spu_req_hdr_len,
			    unsigned int is_inbound,
			    struct spu_cipher_parms *cipher_parms,
			    bool update_key,
			    unsigned int data_size)
{
	struct SPU2_FMD *fmd;
	u8 *omd;	/* start of optional metadata (key, then IV) */
	u64 ctrl0;
	u64 ctrl3;

	flow_log("%s()\n", __func__);
	flow_log(" in: %u\n", is_inbound);
	flow_log(" cipher alg: %u, cipher_type: %u\n", cipher_parms->alg,
		 cipher_parms->type);
	if (update_key) {
		flow_log(" cipher key len: %u\n", cipher_parms->key_len);
		flow_dump(" key: ", cipher_parms->key_buf,
			  cipher_parms->key_len);
	}
	flow_log(" iv len: %d\n", cipher_parms->iv_len);
	flow_dump(" iv: ", cipher_parms->iv_buf, cipher_parms->iv_len);
	flow_log(" data_size: %u\n", data_size);

	fmd = (struct SPU2_FMD *)spu_hdr;
	omd = (u8 *)(fmd + 1);

	/* Set the encrypt/decrypt direction in CTRL0 */
	ctrl0 = le64_to_cpu(fmd->ctrl0);
	if (is_inbound)
		ctrl0 &= ~SPU2_CIPH_ENCRYPT_EN;	/* decrypt */
	else
		ctrl0 |= SPU2_CIPH_ENCRYPT_EN;	/* encrypt */
	fmd->ctrl0 = cpu_to_le64(ctrl0);

	if (cipher_parms->alg && cipher_parms->iv_buf && cipher_parms->iv_len) {
		/* IV is in the OMD, right after the cipher key */
		memcpy(omd + cipher_parms->key_len, cipher_parms->iv_buf,
		       cipher_parms->iv_len);
	}

	/* Fill in the payload length in CTRL3 */
	ctrl3 = le64_to_cpu(fmd->ctrl3);
	data_size &= SPU2_PL_LEN;
	ctrl3 |= data_size;
	fmd->ctrl3 = cpu_to_le64(ctrl3);

	packet_dump(" SPU request header: ", spu_hdr, spu_req_hdr_len);
}
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
/*
 * spu2_request_pad() - Write the padding sections at the end of a SPU
 * request: GCM block alignment pad, hash pad (0x80 marker, zero fill,
 * bit-length trailer), and status-word alignment pad.
 * @pad_start:      where to begin writing padding
 * @gcm_padding:    bytes of zero padding for GCM 16-byte alignment
 * @hash_pad_len:   total hash pad length (including marker and trailer)
 * @auth_alg:       hash algorithm; selects the trailer's byte order
 * @auth_mode:      unused here
 * @total_sent:     total bytes hashed so far; encoded in bits in the trailer
 * @status_padding: bytes of zero padding for 4-byte status alignment
 */
void spu2_request_pad(u8 *pad_start, u32 gcm_padding, u32 hash_pad_len,
		      enum hash_alg auth_alg, enum hash_mode auth_mode,
		      unsigned int total_sent, u32 status_padding)
{
	u8 *ptr = pad_start;

	/* Zero-fill up to the next 16-byte boundary for GCM */
	if (gcm_padding > 0) {
		flow_log(" GCM: padding to 16 byte alignment: %u bytes\n",
			 gcm_padding);
		memset(ptr, 0, gcm_padding);
		ptr += gcm_padding;
	}

	if (hash_pad_len > 0) {
		/* Zero the whole pad area first */
		memset(ptr, 0, hash_pad_len);

		/* Merkle–Damgård padding starts with a single 0x80 byte */
		*ptr = 0x80;
		/* Jump to the final 8 bytes, which hold the message bit length */
		ptr += (hash_pad_len - sizeof(u64));

		/*
		 * MD5 stores its length little-endian; the SHA family stores
		 * it big-endian.
		 * NOTE(review): this writes a u64 through a cast u8 pointer —
		 * assumes the pad area is suitably aligned; confirm for the
		 * buffers callers pass in.
		 */
		if (auth_alg == HASH_ALG_MD5)
			*(u64 *)ptr = cpu_to_le64((u64)total_sent * 8);
		else		/* SHA1, SHA2-224, SHA2-256 */
			*(u64 *)ptr = cpu_to_be64((u64)total_sent * 8);
		ptr += sizeof(u64);
	}

	/* Zero-fill up to the next 4-byte boundary for the status word */
	if (status_padding > 0) {
		flow_log(" STAT: padding to 4 byte alignment: %u bytes\n",
			 status_padding);

		memset(ptr, 0, status_padding);
		ptr += status_padding;
	}
}
1302
1303
1304
1305
1306
1307
1308
/*
 * spu2_xts_tweak_in_payload() - Whether the XTS tweak is carried in the
 * payload. For SPU2 it is not (it goes through the IV field instead).
 *
 * Return: 0.
 */
u8 spu2_xts_tweak_in_payload(void)
{
	return 0;
}
1313
1314
1315
1316
1317
1318
1319
/*
 * spu2_tx_status_len() - Length of the status field in a SPU2 Tx (request)
 * message.
 *
 * Return: SPU2_TX_STATUS_LEN (0 bytes).
 */
u8 spu2_tx_status_len(void)
{
	return SPU2_TX_STATUS_LEN;
}
1324
1325
1326
1327
1328
1329
1330
/*
 * spu2_rx_status_len() - Length of the status field in a SPU2 Rx (response)
 * message.
 *
 * Return: SPU2_RX_STATUS_LEN (2 bytes).
 */
u8 spu2_rx_status_len(void)
{
	return SPU2_RX_STATUS_LEN;
}
1335
1336
1337
1338
1339
1340
1341
1342
1343int spu2_status_process(u8 *statp)
1344{
1345
1346 u16 status = le16_to_cpu(*(__le16 *)statp);
1347
1348 if (status == 0)
1349 return 0;
1350
1351 flow_log("rx status is %#x\n", status);
1352 if (status == SPU2_INVALID_ICV)
1353 return SPU_INVALID_ICV;
1354
1355 return -EBADMSG;
1356}
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
/*
 * spu2_ccm_update_iv() - Convert a CCM B_0 block into the bare nonce form
 * SPU2 expects: strip the leading flags byte and the trailing L-byte
 * message-length field, leaving only the nonce in iv_buf.
 * @digestsize:   unused
 * @cipher_parms: iv_buf/iv_len are modified in place
 * @assoclen:     unused
 * @chunksize:    unused
 * @is_encrypt:   unused
 * @is_esp:       true if the request comes from an ESP (IPsec) transform,
 *                where L is fixed rather than encoded in the flags byte
 */
void spu2_ccm_update_iv(unsigned int digestsize,
			struct spu_cipher_parms *cipher_parms,
			unsigned int assoclen, unsigned int chunksize,
			bool is_encrypt, bool is_esp)
{
	int L;	/* size of the message length field, in bytes */

	/*
	 * For non-ESP, recover L from the L' bits of the B_0 flags byte
	 * (L = L' + 1, per the CCM spec encoding).
	 */
	if (is_esp)
		L = CCM_ESP_L_VALUE;
	else
		L = ((cipher_parms->iv_buf[0] & CCM_B0_L_PRIME) >>
		     CCM_B0_L_PRIME_SHIFT) + 1;

	/* Drop the flags byte from the front and the L length bytes from the end */
	cipher_parms->iv_len -= (1 + L);
	memmove(cipher_parms->iv_buf, &cipher_parms->iv_buf[1],
		cipher_parms->iv_len);
}
1392
1393
1394
1395
1396
1397
1398
/*
 * spu2_wordalign_padlen() - Padding needed to word-align the data.
 * @data_size: unused; SPU2 requires no word-alignment padding.
 *
 * Return: 0.
 */
u32 spu2_wordalign_padlen(u32 data_size)
{
	return 0;
}
1403