// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */
#include <linux/device.h>
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC			BIT(0)

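/* Invoke the request's completion callback, if the caller supplied one. */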
static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
	if (base->complete)
		base->complete(base, err);
}

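/*
 * Enqueue a request on the device, but only if its length is a multiple
 * of the hardware alignment; unaligned lengths are rejected with -EINVAL.
 */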
static int rk_handle_req(struct rk_crypto_info *dev,
			 struct skcipher_request *req)
{
	if (!IS_ALIGNED(req->cryptlen, dev->align_size))
		return -EINVAL;

	return dev->enqueue(dev, &req->base);
}

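/* Check the AES key length and load the key into the AES key registers. */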
static int rk_aes_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
	return 0;
}

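/* Verify the DES key, then load it into the TDES key registers. */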
static int rk_des_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}

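/* Verify the triple-DES key, then load it into the TDES key registers. */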
static int rk_tdes_setkey(struct crypto_skcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}

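/*
 * The encrypt/decrypt entry points below only select the cipher mode and
 * direction bits for the request; rk_handle_req() does the actual queueing.
 */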
static int rk_aes_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = 0;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		    RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

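/*
 * Program the cipher control register, the IV and the byte-swap
 * configuration for the current request, and enable the block-cipher
 * DMA done/error interrupts.
 */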
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_skcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->iv, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->iv, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

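/* Load the DMA source/destination addresses and start the transfer. */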
static void crypto_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

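/*
 * Prepare the next chunk of data: save the IV needed for CBC chaining,
 * map the scatterlists via dev->load_data() and start the DMA engine.
 */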
static int rk_set_data_start(struct rk_crypto_info *dev)
{
	int err;
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 ivsize = crypto_skcipher_ivsize(tfm);
	u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
		dev->sg_src->offset + dev->sg_src->length - ivsize;

	/* Store the IV that needs to be updated in chain mode, and update
	 * the IV buffer to contain the next IV for decryption mode.
	 */
	if (ctx->mode & RK_CRYPTO_DEC) {
		memcpy(ctx->iv, src_last_blk, ivsize);
		sg_pcopy_to_buffer(dev->first, dev->src_nents, req->iv,
				   ivsize, dev->total - ivsize);
	}

	err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
	if (!err)
		crypto_dma_start(dev);
	return err;
}

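/* Start a new request: record its layout, init the hardware and send the
 * first chunk of data.
 */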
static int rk_ablk_start(struct rk_crypto_info *dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	unsigned long flags;
	int err = 0;

	dev->left_bytes = req->cryptlen;
	dev->total = req->cryptlen;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->src_nents = sg_nents(req->src);
	dev->sg_dst = req->dst;
	dev->dst_nents = sg_nents(req->dst);
	dev->aligned = 1;

	spin_lock_irqsave(&dev->lock, flags);
	rk_ablk_hw_init(dev);
	err = rk_set_data_start(dev);
	spin_unlock_irqrestore(&dev->lock, flags);
	return err;
}

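/*
 * Copy the final IV back into the request so that a follow-up operation
 * can chain from the IV this one produced.
 */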
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 ivsize = crypto_skcipher_ivsize(tfm);

	/* Update the IV buffer to contain the next IV for encryption mode. */
	if (!(ctx->mode & RK_CRYPTO_DEC)) {
		if (dev->aligned) {
			memcpy(req->iv, sg_virt(dev->sg_dst) +
				dev->sg_dst->length - ivsize, ivsize);
		} else {
			memcpy(req->iv, dev->addr_vir +
				dev->count - ivsize, ivsize);
		}
	}
}

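/*
 * Write the IV for the next chunk into the hardware: the saved last
 * ciphertext block when decrypting, or the last output block when
 * encrypting.
 */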
static void rk_update_iv(struct rk_crypto_info *dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 ivsize = crypto_skcipher_ivsize(tfm);
	u8 *new_iv = NULL;

	if (ctx->mode & RK_CRYPTO_DEC) {
		new_iv = ctx->iv;
	} else {
		new_iv = page_address(sg_page(dev->sg_dst)) +
			 dev->sg_dst->offset + dev->sg_dst->length - ivsize;
	}

	if (ivsize == DES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
	else if (ivsize == AES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
}

/*
 * Returns a negative error code if something went wrong, or 0 after the
 * next scatterlist has been queued (or the request has been completed).
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
	int err = 0;
	struct skcipher_request *req =
		skcipher_request_cast(dev->async_req);

	dev->unload_data(dev);
	if (!dev->aligned) {
		if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
					  dev->addr_vir, dev->count,
					  dev->total - dev->left_bytes -
					  dev->count)) {
			err = -EINVAL;
			goto out_rx;
		}
	}
	if (dev->left_bytes) {
		rk_update_iv(dev);
		if (dev->aligned) {
			if (sg_is_last(dev->sg_src)) {
				dev_err(dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
			dev->sg_dst = sg_next(dev->sg_dst);
		}
		err = rk_set_data_start(dev);
	} else {
		rk_iv_copyback(dev);
		/* The whole request has been processed without error. */
		dev->complete(dev->async_req, 0);
		tasklet_schedule(&dev->queue_task);
	}
out_rx:
	return err;
}

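/* Per-transform setup: bind the device, allocate a bounce page for
 * unaligned data and enable the clocks.
 */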
static int rk_ablk_init_tfm(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt;

	algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);

	ctx->dev = algt->dev;
	ctx->dev->align_size = crypto_tfm_alg_alignmask(crypto_skcipher_tfm(tfm)) + 1;
	ctx->dev->start = rk_ablk_start;
	ctx->dev->update = rk_ablk_rx;
	ctx->dev->complete = rk_crypto_complete;
	ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);
	if (!ctx->dev->addr_vir)
		return -ENOMEM;

	return ctx->dev->enable_clk(ctx->dev);
}

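/* Per-transform teardown: free the bounce page and disable the clocks. */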
static void rk_ablk_exit_tfm(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	free_page((unsigned long)ctx->dev->addr_vir);
	ctx->dev->disable_clk(ctx->dev);
}

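/*
 * Algorithm templates registered with the crypto API. They all share the
 * init/exit and request-handling paths above.
 */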
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.skcipher = {
		.base.cra_name = "ecb(aes)",
		.base.cra_driver_name = "ecb-aes-rk",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask = 0x0f,
		.base.cra_module = THIS_MODULE,

		.init = rk_ablk_init_tfm,
		.exit = rk_ablk_exit_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = rk_aes_setkey,
		.encrypt = rk_aes_ecb_encrypt,
		.decrypt = rk_aes_ecb_decrypt,
	}
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.skcipher = {
		.base.cra_name = "cbc(aes)",
		.base.cra_driver_name = "cbc-aes-rk",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask = 0x0f,
		.base.cra_module = THIS_MODULE,

		.init = rk_ablk_init_tfm,
		.exit = rk_ablk_exit_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = rk_aes_setkey,
		.encrypt = rk_aes_cbc_encrypt,
		.decrypt = rk_aes_cbc_decrypt,
	}
};

struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.skcipher = {
		.base.cra_name = "ecb(des)",
		.base.cra_driver_name = "ecb-des-rk",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask = 0x07,
		.base.cra_module = THIS_MODULE,

		.init = rk_ablk_init_tfm,
		.exit = rk_ablk_exit_tfm,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.setkey = rk_des_setkey,
		.encrypt = rk_des_ecb_encrypt,
		.decrypt = rk_des_ecb_decrypt,
	}
};

struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.skcipher = {
		.base.cra_name = "cbc(des)",
		.base.cra_driver_name = "cbc-des-rk",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask = 0x07,
		.base.cra_module = THIS_MODULE,

		.init = rk_ablk_init_tfm,
		.exit = rk_ablk_exit_tfm,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = rk_des_setkey,
		.encrypt = rk_des_cbc_encrypt,
		.decrypt = rk_des_cbc_decrypt,
	}
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.skcipher = {
		.base.cra_name = "ecb(des3_ede)",
		.base.cra_driver_name = "ecb-des3-ede-rk",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask = 0x07,
		.base.cra_module = THIS_MODULE,

		.init = rk_ablk_init_tfm,
		.exit = rk_ablk_exit_tfm,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.setkey = rk_tdes_setkey,
		.encrypt = rk_des3_ede_ecb_encrypt,
		.decrypt = rk_des3_ede_ecb_decrypt,
	}
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.skcipher = {
		.base.cra_name = "cbc(des3_ede)",
		.base.cra_driver_name = "cbc-des3-ede-rk",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask = 0x07,
		.base.cra_module = THIS_MODULE,

		.init = rk_ablk_init_tfm,
		.exit = rk_ablk_exit_tfm,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = rk_tdes_setkey,
		.encrypt = rk_des3_ede_cbc_encrypt,
		.decrypt = rk_des3_ede_cbc_decrypt,
	}
};