linux/arch/arm/crypto/aes-ce-glue.c
/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

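/*
 * ce_aes_sub() applies the AES S-box to each byte of its argument (the
 * SubWord step of the key schedule) and ce_aes_invert() applies the
 * Inverse MixColumns transformation to a single round key; both are
 * built on the v8 Crypto Extensions AES instructions.
 */
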
asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				   int rounds, int blocks, u8 iv[],
				   u8 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				   int rounds, int blocks, u8 iv[],
				   u8 const rk2[], int first);

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
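	/* e.g. a 32-byte (256-bit) key gives 6 + 32/4 = 14 rounds */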
	return 6 + ctx->key_length / 4;
}

static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key_enc, in_key, key_len);
	ctx->key_length = key_len;

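	/*
	 * ce_aes_sub() and ce_aes_invert() execute NEON instructions, so
	 * the whole expansion runs inside a kernel_neon_begin()/end()
	 * section.
	 */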
	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

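		/*
		 * 192-bit keys are expanded six words per iteration and
		 * 256-bit keys eight, the latter with an extra SubWord
		 * (without rotation) applied when deriving rko[4]; both
		 * schedules finish in fewer iterations than the 128-bit
		 * one, hence the early breaks.
		 */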
		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}

static int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

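/*
 * XTS uses two independent AES keys: key1 encrypts the data and key2
 * encrypts the IV to produce the tweak, so the supplied key material is
 * split in half and each half must be a valid AES key on its own.
 */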
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

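/*
 * kernel_neon_begin() disables preemption, so the walk code must not be
 * allowed to sleep while the NEON unit is in use; each of the routines
 * below therefore clears CRYPTO_TFM_REQ_MAY_SLEEP before starting the
 * walk.
 */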
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks,
				   walk.iv);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err, blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
			break;
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
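	/*
	 * CTR is a stream cipher, so a trailing partial block is handled
	 * by encrypting into a block-sized aligned buffer and copying out
	 * only the bytes that are actually needed.
	 */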
	if (nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 __aligned(8) tail[AES_BLOCK_SIZE];

		/*
		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
		 * to tell aes_ctr_encrypt() to only read half a block.
		 */
		blocks = (nbytes <= 8) ? -1 : 1;

		ce_aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc,
				   num_rounds(ctx), blocks, walk.iv);
		memcpy(tdst, tail, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key1.key_enc, rounds, blocks,
				   walk.iv, (u8 *)ctx->key2.key_enc, first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key1.key_dec, rounds, blocks,
				   walk.iv, (u8 *)ctx->key2.key_enc, first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

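/*
 * The first four entries are synchronous implementations, flagged
 * CRYPTO_ALG_INTERNAL so they cannot be requested directly by users;
 * the "ecb(aes)"/"cbc(aes)"/"ctr(aes)"/"xts(aes)" entries wrap them via
 * the ablk_helper, which defers to cryptd when the NEON unit cannot be
 * used from the calling context. Callers reach them through the generic
 * names, e.g. (sketch, legacy ablkcipher API):
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
 *
 * which selects "ctr-aes-ce" by priority when the Crypto Extensions
 * are available.
 */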
static struct crypto_alg aes_algs[] = { {
	.cra_name		= "__ecb-aes-ce",
	.cra_driver_name	= "__driver-ecb-aes-ce",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ce_aes_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	},
}, {
	.cra_name		= "__cbc-aes-ce",
	.cra_driver_name	= "__driver-cbc-aes-ce",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ce_aes_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-ce",
	.cra_driver_name	= "__driver-ctr-aes-ce",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ce_aes_setkey,
		.encrypt	= ctr_encrypt,
		.decrypt	= ctr_encrypt,
	},
}, {
	.cra_name		= "__xts-aes-ce",
	.cra_driver_name	= "__driver-xts-aes-ce",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_set_key,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-ce",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-ce",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-ce",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-ce",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
} };

static int __init aes_init(void)
{
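	/* HWCAP2_AES is set when the CPU implements the AES instructions */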
	if (!(elf_hwcap2 & HWCAP2_AES))
		return -ENODEV;
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);