1
2
3
4
5
6
7
8
9
10
11
12
13
14#include "rk3288_crypto.h"
15
/* OR-ed into ctx->mode by the *_decrypt entry points to select the decrypt direction. */
#define RK_CRYPTO_DEC			BIT(0)
17
18static void rk_crypto_complete(struct crypto_async_request *base, int err)
19{
20 if (base->complete)
21 base->complete(base, err);
22}
23
24static int rk_handle_req(struct rk_crypto_info *dev,
25 struct ablkcipher_request *req)
26{
27 if (!IS_ALIGNED(req->nbytes, dev->align_size))
28 return -EINVAL;
29 else
30 return dev->enqueue(dev, &req->base);
31}
32
33static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
34 const u8 *key, unsigned int keylen)
35{
36 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
37 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
38
39 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
40 keylen != AES_KEYSIZE_256) {
41 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
42 return -EINVAL;
43 }
44 ctx->keylen = keylen;
45 memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
46 return 0;
47}
48
/*
 * Set a single-DES key and write it into the engine's key registers.
 *
 * des_ekey() returns 0 for weak keys; when the caller asked for weak
 * key rejection (CRYPTO_TFM_REQ_WEAK_KEY) such keys are refused and
 * CRYPTO_TFM_RES_WEAK_KEY is reported back via crt_flags.
 *
 * NOTE(review): the key is written straight to MMIO here, which
 * presumably assumes no other transform's request is mid-flight on
 * the engine — confirm against the driver's queueing model.
 */
static int rk_des_setkey(struct crypto_ablkcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];

	if (!des_ekey(tmp, key) &&
	    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}
66
/*
 * Set a triple-DES key: validate it with __des3_verify_key() (which
 * rejects keys that degenerate to fewer DES operations, updating the
 * flags word), then write it into the engine's TDES key registers.
 */
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 flags;
	int err;

	flags = crypto_ablkcipher_get_flags(cipher);
	err = __des3_verify_key(&flags, key);
	if (unlikely(err)) {
		/* propagate the reason bits set by __des3_verify_key() */
		crypto_ablkcipher_set_flags(cipher, flags);
		return err;
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}
85
86static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
87{
88 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
89 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
90 struct rk_crypto_info *dev = ctx->dev;
91
92 ctx->mode = RK_CRYPTO_AES_ECB_MODE;
93 return rk_handle_req(dev, req);
94}
95
96static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
97{
98 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
99 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
100 struct rk_crypto_info *dev = ctx->dev;
101
102 ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
103 return rk_handle_req(dev, req);
104}
105
106static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
107{
108 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
109 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
110 struct rk_crypto_info *dev = ctx->dev;
111
112 ctx->mode = RK_CRYPTO_AES_CBC_MODE;
113 return rk_handle_req(dev, req);
114}
115
116static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
117{
118 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
119 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
120 struct rk_crypto_info *dev = ctx->dev;
121
122 ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
123 return rk_handle_req(dev, req);
124}
125
126static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
127{
128 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
129 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
130 struct rk_crypto_info *dev = ctx->dev;
131
132 ctx->mode = 0;
133 return rk_handle_req(dev, req);
134}
135
136static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
137{
138 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
139 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
140 struct rk_crypto_info *dev = ctx->dev;
141
142 ctx->mode = RK_CRYPTO_DEC;
143 return rk_handle_req(dev, req);
144}
145
146static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
147{
148 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
149 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
150 struct rk_crypto_info *dev = ctx->dev;
151
152 ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
153 return rk_handle_req(dev, req);
154}
155
156static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
157{
158 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
159 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
160 struct rk_crypto_info *dev = ctx->dev;
161
162 ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
163 return rk_handle_req(dev, req);
164}
165
166static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
167{
168 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
169 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
170 struct rk_crypto_info *dev = ctx->dev;
171
172 ctx->mode = RK_CRYPTO_TDES_SELECT;
173 return rk_handle_req(dev, req);
174}
175
176static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
177{
178 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
179 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
180 struct rk_crypto_info *dev = ctx->dev;
181
182 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
183 return rk_handle_req(dev, req);
184}
185
186static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
187{
188 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
189 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
190 struct rk_crypto_info *dev = ctx->dev;
191
192 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
193 return rk_handle_req(dev, req);
194}
195
196static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
197{
198 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
199 struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
200 struct rk_crypto_info *dev = ctx->dev;
201
202 ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
203 RK_CRYPTO_DEC;
204 return rk_handle_req(dev, req);
205}
206
/*
 * Program the engine for the request currently held in dev->async_req:
 * cipher control word (mode bits preset by the entry points), the IV,
 * the byte-swap/FIFO configuration, and finally unmask the DMA
 * done/error interrupts.
 *
 * The DES/TDES and AES register banks are distinct; the transform's
 * block size (8 for DES/3DES, 16 for AES) selects the family.
 */
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_ablkcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
		/* NOTE(review): for ECB modes ivsize is 0 and req->info may
		 * be NULL; this relies on a zero-length memcpy_toio() being
		 * a no-op — confirm. */
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		/* 128-bit is the hardware default; widen for 192/256 */
		if (ctx->keylen == AES_KEYSIZE_192)
			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}
244
/*
 * Kick off one DMA block transfer: program the source/destination bus
 * addresses and the receive length (in 32-bit words), then set
 * BLOCK_START together with its write-enable bit in the upper
 * half-word.
 *
 * NOTE(review): only the receive length (BRDMAL) is programmed here;
 * presumably the transmit side derives its length from it — confirm
 * against the RK3288 TRM.
 */
static void crypto_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}
253
254static int rk_set_data_start(struct rk_crypto_info *dev)
255{
256 int err;
257
258 err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
259 if (!err)
260 crypto_dma_start(dev);
261 return err;
262}
263
264static int rk_ablk_start(struct rk_crypto_info *dev)
265{
266 struct ablkcipher_request *req =
267 ablkcipher_request_cast(dev->async_req);
268 unsigned long flags;
269 int err = 0;
270
271 dev->left_bytes = req->nbytes;
272 dev->total = req->nbytes;
273 dev->sg_src = req->src;
274 dev->first = req->src;
275 dev->nents = sg_nents(req->src);
276 dev->sg_dst = req->dst;
277 dev->aligned = 1;
278
279 spin_lock_irqsave(&dev->lock, flags);
280 rk_ablk_hw_init(dev);
281 err = rk_set_data_start(dev);
282 spin_unlock_irqrestore(&dev->lock, flags);
283 return err;
284}
285
286static void rk_iv_copyback(struct rk_crypto_info *dev)
287{
288 struct ablkcipher_request *req =
289 ablkcipher_request_cast(dev->async_req);
290 struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
291 u32 ivsize = crypto_ablkcipher_ivsize(tfm);
292
293 if (ivsize == DES_BLOCK_SIZE)
294 memcpy_fromio(req->info, dev->reg + RK_CRYPTO_TDES_IV_0,
295 ivsize);
296 else if (ivsize == AES_BLOCK_SIZE)
297 memcpy_fromio(req->info, dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
298}
299
300
301
302
303
/*
 * Per-block completion handler (the driver's "update" hook), called
 * after a DMA block finishes: unmap the buffers, copy bounced output
 * back for unaligned requests, then either start the next scatterlist
 * segment or finish the request (IV copy-back + completion callback).
 *
 * Returns 0 on success or a negative errno.
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
	int err = 0;
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);

	dev->unload_data(dev);
	if (!dev->aligned) {
		/* Output went through the bounce page; copy it into the
		 * destination scatterlist at the already-consumed offset.
		 * NOTE(review): dev->nents was computed from req->src in
		 * rk_ablk_start() but is used here to walk req->dst —
		 * confirm src/dst always have compatible layouts. */
		if (!sg_pcopy_from_buffer(req->dst, dev->nents,
					  dev->addr_vir, dev->count,
					  dev->total - dev->left_bytes -
					  dev->count)) {
			err = -EINVAL;
			goto out_rx;
		}
	}
	if (dev->left_bytes) {
		/* More data pending: only the aligned path advances the
		 * scatterlists here; presumably the bounce path advances
		 * inside load_data/unload_data — confirm. */
		if (dev->aligned) {
			if (sg_is_last(dev->sg_src)) {
				dev_err(dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
			dev->sg_dst = sg_next(dev->sg_dst);
		}
		err = rk_set_data_start(dev);
	} else {
		/* Request done: publish the final IV, signal completion and
		 * let the queue tasklet pick up the next request. */
		rk_iv_copyback(dev);

		dev->complete(dev->async_req, 0);
		tasklet_schedule(&dev->queue_task);
	}
out_rx:
	return err;
}
341
342static int rk_ablk_cra_init(struct crypto_tfm *tfm)
343{
344 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
345 struct crypto_alg *alg = tfm->__crt_alg;
346 struct rk_crypto_tmp *algt;
347
348 algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);
349
350 ctx->dev = algt->dev;
351 ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
352 ctx->dev->start = rk_ablk_start;
353 ctx->dev->update = rk_ablk_rx;
354 ctx->dev->complete = rk_crypto_complete;
355 ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
356
357 return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
358}
359
360static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
361{
362 struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
363
364 free_page((unsigned long)ctx->dev->addr_vir);
365 ctx->dev->disable_clk(ctx->dev);
366}
367
/*
 * ecb(aes) offloaded to the engine.  No .ivsize — ECB takes no IV.
 * cra_alignmask 0x0f feeds dev->align_size via rk_ablk_cra_init().
 */
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "ecb-aes-rk",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.cra_alignmask = 0x0f,
		.cra_type = &crypto_ablkcipher_type,
		.cra_module = THIS_MODULE,
		.cra_init = rk_ablk_cra_init,
		.cra_exit = rk_ablk_cra_exit,
		.cra_u.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = rk_aes_setkey,
			.encrypt = rk_aes_ecb_encrypt,
			.decrypt = rk_aes_ecb_decrypt,
		}
	}
};
392
/* cbc(aes) offloaded to the engine; IV is one AES block. */
struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "cbc-aes-rk",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.cra_alignmask = 0x0f,
		.cra_type = &crypto_ablkcipher_type,
		.cra_module = THIS_MODULE,
		.cra_init = rk_ablk_cra_init,
		.cra_exit = rk_ablk_cra_exit,
		.cra_u.ablkcipher = {
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = rk_aes_setkey,
			.encrypt = rk_aes_cbc_encrypt,
			.decrypt = rk_aes_cbc_decrypt,
		}
	}
};
418
/* ecb(des) offloaded to the engine.  No .ivsize — ECB takes no IV. */
struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name = "ecb(des)",
		.cra_driver_name = "ecb-des-rk",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.cra_alignmask = 0x07,
		.cra_type = &crypto_ablkcipher_type,
		.cra_module = THIS_MODULE,
		.cra_init = rk_ablk_cra_init,
		.cra_exit = rk_ablk_cra_exit,
		.cra_u.ablkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = rk_des_setkey,
			.encrypt = rk_des_ecb_encrypt,
			.decrypt = rk_des_ecb_decrypt,
		}
	}
};
443
/* cbc(des) offloaded to the engine; IV is one DES block. */
struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name = "cbc(des)",
		.cra_driver_name = "cbc-des-rk",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.cra_alignmask = 0x07,
		.cra_type = &crypto_ablkcipher_type,
		.cra_module = THIS_MODULE,
		.cra_init = rk_ablk_cra_init,
		.cra_exit = rk_ablk_cra_exit,
		.cra_u.ablkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = rk_des_setkey,
			.encrypt = rk_des_cbc_encrypt,
			.decrypt = rk_des_cbc_decrypt,
		}
	}
};
469
470struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
471 .type = ALG_TYPE_CIPHER,
472 .alg.crypto = {
473 .cra_name = "ecb(des3_ede)",
474 .cra_driver_name = "ecb-des3-ede-rk",
475 .cra_priority = 300,
476 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
477 CRYPTO_ALG_ASYNC,
478 .cra_blocksize = DES_BLOCK_SIZE,
479 .cra_ctxsize = sizeof(struct rk_cipher_ctx),
480 .cra_alignmask = 0x07,
481 .cra_type = &crypto_ablkcipher_type,
482 .cra_module = THIS_MODULE,
483 .cra_init = rk_ablk_cra_init,
484 .cra_exit = rk_ablk_cra_exit,
485 .cra_u.ablkcipher = {
486 .min_keysize = DES3_EDE_KEY_SIZE,
487 .max_keysize = DES3_EDE_KEY_SIZE,
488 .ivsize = DES_BLOCK_SIZE,
489 .setkey = rk_tdes_setkey,
490 .encrypt = rk_des3_ede_ecb_encrypt,
491 .decrypt = rk_des3_ede_ecb_decrypt,
492 }
493 }
494};
495
/* cbc(des3_ede) offloaded to the engine; IV is one DES block. */
struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "cbc-des3-ede-rk",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
			     CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.cra_alignmask = 0x07,
		.cra_type = &crypto_ablkcipher_type,
		.cra_module = THIS_MODULE,
		.cra_init = rk_ablk_cra_init,
		.cra_exit = rk_ablk_cra_exit,
		.cra_u.ablkcipher = {
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = rk_tdes_setkey,
			.encrypt = rk_des3_ede_cbc_encrypt,
			.decrypt = rk_des3_ede_cbc_decrypt,
		}
	}
};
521