/*
 * Glue Code for AES-NI/AVX assembler optimized version of Camellia
 */
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>

#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16

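/*
 * 16-way parallel cipher routines, implemented in the accompanying
 * AES-NI/AVX assembler source. They are exported so that other Camellia
 * glue modules (e.g. the AVX2 variant) can reuse them.
 */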
asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);

asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_ctr_16way);

asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_enc_16way);

asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_dec_16way);

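/*
 * Single-block XTS helpers: encrypt/decrypt one 128-bit block with the
 * tweak applied by the generic glue helper. These are exported as well so
 * that the wider (AVX2) glue code can fall back to them.
 */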
void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_enc_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);

void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_dec_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);

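/*
 * Dispatch tables for the common glue layer. Entries are ordered from the
 * widest to the narrowest implementation; the glue helpers pick the largest
 * .num_blocks that still fits the remaining data and only take the FPU once
 * at least .fpu_blocks_limit blocks are to be processed.
 */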
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};

static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};

static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};

static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};

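/*
 * blkcipher entry points for the internal (synchronous) ECB/CBC/CTR
 * algorithms. The actual work is done by the common glue helpers using the
 * dispatch tables above; CBC encryption is inherently serial, so it always
 * runs the one-block routine.
 */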
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}

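/*
 * The FPU (and with it the AVX register state) is only claimed when at
 * least a full 16-block batch is pending; smaller requests use the
 * 2-way/1-way routines and avoid the FPU save/restore overhead.
 */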
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}

static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}

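/*
 * Per-request state passed through the lrw_crypt() callbacks: the key
 * context plus whether the FPU has already been claimed for this request.
 */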
struct crypt_priv {
	struct camellia_ctx *ctx;
	bool fpu_enabled;
};

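/*
 * LRW callbacks: handle a 16-way batch first, then 2-way pairs, then single
 * blocks. Each call covers at most tbuflen bytes (16 blocks), so a single
 * 'if' suffices for the 16-way path.
 */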
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_dec_blk(ctx->ctx, srcdst, srcdst);
}

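/*
 * LRW entry points: lrw_crypt() buffers up to 16 blocks in 'buf', applies
 * the tweaks and calls back into the routines above. Sleeping is disallowed
 * because the callbacks may run with the FPU claimed.
 */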
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

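/*
 * XTS entry points. The tweak is always computed with the encryption
 * primitive (camellia_enc_blk), for decryption as well, as the XTS
 * construction requires.
 */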
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

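/*
 * Algorithm registrations: the first five entries are the internal,
 * synchronous helpers (CRYPTO_ALG_INTERNAL, priority 0); the remaining five
 * are the ablk_helper based async wrappers that user requests actually hit,
 * deferring to cryptd whenever the FPU is not directly usable.
 */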
static struct crypto_alg cmll_algs[10] = { {
	.cra_name = "__ecb-camellia-aesni",
	.cra_driver_name = "__driver-ecb-camellia-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = camellia_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "__cbc-camellia-aesni",
	.cra_driver_name = "__driver-cbc-camellia-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = camellia_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	.cra_name = "__ctr-camellia-aesni",
	.cra_driver_name = "__driver-ctr-camellia-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct camellia_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = camellia_setkey,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	.cra_name = "__lrw-camellia-aesni",
	.cra_driver_name = "__driver-lrw-camellia-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = lrw_camellia_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	.cra_name = "__xts-camellia-aesni",
	.cra_driver_name = "__driver-xts-camellia-aesni",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
		     CRYPTO_ALG_INTERNAL,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct camellia_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = xts_camellia_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	.cra_name = "ecb(camellia)",
	.cra_driver_name = "ecb-camellia-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "cbc(camellia)",
	.cra_driver_name = "cbc-camellia-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "ctr(camellia)",
	.cra_driver_name = "ctr-camellia-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
}, {
	.cra_name = "lrw(camellia)",
	.cra_driver_name = "lrw-camellia-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE +
				       CAMELLIA_BLOCK_SIZE,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "xts(camellia)",
	.cra_driver_name = "xts-camellia-aesni",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize = CAMELLIA_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };

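/*
 * Require AVX, AES-NI and OSXSAVE, and verify that the OS saves the SSE and
 * YMM register state, before registering the algorithms.
 */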
static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-aesni");