/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#include "aes-ctr-fallback.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE                    "ce"
#define PRIO                    300
#define aes_setkey              ce_aes_setkey
#define aes_expandkey           ce_aes_expandkey
#define aes_ecb_encrypt         ce_aes_ecb_encrypt
#define aes_ecb_decrypt         ce_aes_ecb_decrypt
#define aes_cbc_encrypt         ce_aes_cbc_encrypt
#define aes_cbc_decrypt         ce_aes_cbc_decrypt
#define aes_ctr_encrypt         ce_aes_ctr_encrypt
#define aes_xts_encrypt         ce_aes_xts_encrypt
#define aes_xts_decrypt         ce_aes_xts_decrypt
#define aes_mac_update          ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE                    "neon"
#define PRIO                    200
#define aes_setkey              crypto_aes_set_key
#define aes_expandkey           crypto_aes_expand_key
#define aes_ecb_encrypt         neon_aes_ecb_encrypt
#define aes_ecb_decrypt         neon_aes_ecb_decrypt
#define aes_cbc_encrypt         neon_aes_cbc_encrypt
#define aes_cbc_decrypt         neon_aes_cbc_decrypt
#define aes_ctr_encrypt         neon_aes_ctr_encrypt
#define aes_xts_encrypt         neon_aes_xts_encrypt
#define aes_xts_decrypt         neon_aes_xts_decrypt
#define aes_mac_update          neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif
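
/*
 * Note: this translation unit is built twice (see arch/arm64/crypto/Makefile):
 * once with USE_V8_CRYPTO_EXTENSIONS defined, producing the "ce" driver that
 * uses the ARMv8 Crypto Extensions instructions, and once without, producing
 * the plain NEON driver. Only the macro block above differs; the glue logic
 * below is shared. The CE variant registers with a higher priority, so the
 * crypto API prefers it on hardware where both are available.
 */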

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
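/*
 * All of these routines expect the round count that matches the expanded
 * key (rounds = 6 + key_length / 4, i.e. 10/12/14 for AES-128/192/256) and
 * a key schedule laid out the way aes_expandkey() produces it; all callers
 * below invoke them between kernel_neon_begin() and kernel_neon_end().
 */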
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
                                int rounds, int blocks, u8 const rk2[], u8 iv[],
                                int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
                                int rounds, int blocks, u8 const rk2[], u8 iv[],
                                int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
                               int blocks, u8 dg[], int enc_before,
                               int enc_after);

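/*
 * XTS uses two independent AES keys: key1 encrypts the data blocks, while
 * key2 is used to encrypt the IV into the initial tweak. The supplied key
 * material is therefore twice the AES key size and is split in half by
 * xts_set_key().
 */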
struct crypto_aes_xts_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
};

struct mac_tfm_ctx {
        struct crypto_aes_ctx key;
        u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
        unsigned int len;
        u8 dg[AES_BLOCK_SIZE];
};

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
                               unsigned int key_len)
{
        return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int ret;

        ret = xts_verify_key(tfm, in_key, key_len);
        if (ret)
                return ret;

        ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
        if (!ret)
                ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
                                    key_len / 2);
        if (!ret)
                return 0;

        crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
        return -EINVAL;
}

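/*
 * The skcipher handlers below all follow the same pattern: map the request
 * with skcipher_walk_virt(), then process one walk chunk at a time,
 * bracketing each call into the asm code with kernel_neon_begin() and
 * kernel_neon_end() so the NEON unit is only claimed for bounded stretches
 * of work.
 */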
static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_dec, rounds, blocks);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_dec, rounds, blocks, walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}

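/*
 * CTR turns AES into a stream cipher, so requests need not be a multiple of
 * the block size. Full blocks go straight to the asm code; a partial trailing
 * block is handled by generating one extra block of keystream into a stack
 * buffer (blocks == -1 below tells aes_ctr_encrypt() to produce a tail block)
 * and XORing only the leftover bytes into the destination.
 */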
static int ctr_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, rounds = 6 + ctx->key_length / 4;
        struct skcipher_walk walk;
        int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                kernel_neon_begin();
                aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key_enc, rounds, blocks, walk.iv);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        if (walk.nbytes) {
                u8 __aligned(8) tail[AES_BLOCK_SIZE];
                unsigned int nbytes = walk.nbytes;
                u8 *tdst = walk.dst.virt.addr;
                u8 *tsrc = walk.src.virt.addr;

                /* Tell aes_ctr_encrypt() to process a tail block. */
                blocks = -1;

                kernel_neon_begin();
                aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
                                blocks, walk.iv);
                kernel_neon_end();
                crypto_xor_cpy(tdst, tsrc, tail, nbytes);
                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}

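/*
 * Synchronous entry point for the non-internal ctr(aes) algorithm: when the
 * NEON unit cannot be used in the current context (e.g. in interrupt
 * context), fall back to a scalar implementation instead of deferring.
 */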
static int ctr_encrypt_sync(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

        if (!may_use_simd())
                return aes_ctr_encrypt_fallback(ctx, req);

        return ctr_encrypt(req);
}

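/*
 * The 'first' argument tells the asm code to encrypt the IV with key2 to
 * derive the initial tweak; later iterations carry the tweak forward in
 * walk.iv. Note that xts_decrypt() also passes key2's *encryption* key
 * schedule, since the tweak is always computed with AES encryption.
 */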
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                kernel_neon_begin();
                aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key1.key_enc, rounds, blocks,
                                (u8 *)ctx->key2.key_enc, walk.iv, first);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }

        return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err, first, rounds = 6 + ctx->key1.key_length / 4;
        struct skcipher_walk walk;
        unsigned int blocks;

        err = skcipher_walk_virt(&walk, req, false);

        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                kernel_neon_begin();
                aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                (u8 *)ctx->key1.key_dec, rounds, blocks,
                                (u8 *)ctx->key2.key_enc, walk.iv, first);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
        }

        return err;
}

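/*
 * The "__" prefixed algorithms are internal (CRYPTO_ALG_INTERNAL): they
 * assume SIMD is usable and are never handed out to general users.
 * aes_init() wraps each of them in a simd skcipher that defers work when
 * the NEON unit is unavailable. The bare ctr(aes) entry at PRIO - 1 is a
 * fully synchronous variant with its own scalar fallback.
 */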
static struct skcipher_alg aes_algs[] = { {
        .base = {
                .cra_name               = "__ecb(aes)",
                .cra_driver_name        = "__ecb-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = ecb_encrypt,
        .decrypt        = ecb_decrypt,
}, {
        .base = {
                .cra_name               = "__cbc(aes)",
                .cra_driver_name        = "__cbc-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = cbc_encrypt,
        .decrypt        = cbc_decrypt,
}, {
        .base = {
                .cra_name               = "__ctr(aes)",
                .cra_driver_name        = "__ctr-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = ctr_encrypt,
        .decrypt        = ctr_encrypt,
}, {
        .base = {
                .cra_name               = "ctr(aes)",
                .cra_driver_name        = "ctr-aes-" MODE,
                .cra_priority           = PRIO - 1,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .setkey         = skcipher_aes_setkey,
        .encrypt        = ctr_encrypt_sync,
        .decrypt        = ctr_encrypt_sync,
}, {
        .base = {
                .cra_name               = "__xts(aes)",
                .cra_driver_name        = "__xts-aes-" MODE,
                .cra_priority           = PRIO,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct crypto_aes_xts_ctx),
                .cra_module             = THIS_MODULE,
        },
        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
        .ivsize         = AES_BLOCK_SIZE,
        .setkey         = xts_set_key,
        .encrypt        = xts_encrypt,
        .decrypt        = xts_decrypt,
} };

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
                         unsigned int key_len)
{
        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        int err;

        err = aes_expandkey(&ctx->key, in_key, key_len);
        if (err)
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

        return err;
}

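/*
 * Multiply by x in GF(2^128) as defined for CMAC: shift the big-endian
 * 128-bit value left by one bit and, if a bit falls off the top, reduce by
 * the field polynomial x^128 + x^7 + x^2 + x + 1, i.e. XOR 0x87 into the
 * low byte.
 */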
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
        u64 a = be64_to_cpu(x->a);
        u64 b = be64_to_cpu(x->b);

        y->a = cpu_to_be64((a << 1) | (b >> 63));
        y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

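/*
 * CMAC subkey derivation (NIST SP 800-38B): L = AES_K(0^128), K1 = L.x,
 * K2 = L.x^2. K1 ends up in consts[0] and K2 in consts[1]; cmac_final()
 * selects one based on whether the final block was complete.
 */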
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        be128 *consts = (be128 *)ctx->consts;
        u8 *rk = (u8 *)ctx->key.key_enc;
        int rounds = 6 + key_len / 4;
        int err;

        err = cbcmac_setkey(tfm, in_key, key_len);
        if (err)
                return err;

        /* encrypt the zero vector */
        kernel_neon_begin();
        aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1);
        kernel_neon_end();

        cmac_gf128_mul_by_x(consts, consts);
        cmac_gf128_mul_by_x(consts + 1, consts);

        return 0;
}

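/*
 * XCBC (RFC 3566) derives three keys by encrypting the constants 0x01..,
 * 0x02.. and 0x03..: the first becomes the actual CBC-MAC key, the other
 * two land in consts[] and are XORed into the final block, which lets
 * XCBC reuse the CMAC finalization path.
 */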
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        static u8 const ks[3][AES_BLOCK_SIZE] = {
                { [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
                { [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
                { [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
        };

        struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
        u8 *rk = (u8 *)ctx->key.key_enc;
        int rounds = 6 + key_len / 4;
        u8 key[AES_BLOCK_SIZE];
        int err;

        err = cbcmac_setkey(tfm, in_key, key_len);
        if (err)
                return err;

        kernel_neon_begin();
        aes_ecb_encrypt(key, ks[0], rk, rounds, 1);
        aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2);
        kernel_neon_end();

        return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        memset(ctx->dg, 0, AES_BLOCK_SIZE);
        ctx->len = 0;

        return 0;
}

static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
                          u8 dg[], int enc_before, int enc_after)
{
        int rounds = 6 + ctx->key_length / 4;

        if (may_use_simd()) {
                kernel_neon_begin();
                aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
                               enc_after);
                kernel_neon_end();
        } else {
                if (enc_before)
                        __aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);

                while (blocks--) {
                        crypto_xor(dg, in, AES_BLOCK_SIZE);
                        in += AES_BLOCK_SIZE;

                        if (blocks || enc_after)
                                __aes_arm64_encrypt(ctx->key_enc, dg, dg,
                                                    rounds);
                }
        }
}

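/*
 * Shared CBC-MAC update: whole blocks are passed to mac_do_update(),
 * partial input is accumulated by XORing it into the digest buffer.
 * Encrypting a just-completed block is deferred (enc_after == 0) until
 * more data arrives or the MAC is finalized, so that cmac_final() can
 * still XOR a subkey into the last block before it is encrypted.
 */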
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        while (len > 0) {
                unsigned int l;

                if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
                    (ctx->len + len) > AES_BLOCK_SIZE) {

                        int blocks = len / AES_BLOCK_SIZE;

                        len %= AES_BLOCK_SIZE;

                        mac_do_update(&tctx->key, p, blocks, ctx->dg,
                                      (ctx->len != 0), (len != 0));

                        p += blocks * AES_BLOCK_SIZE;

                        if (!len) {
                                ctx->len = AES_BLOCK_SIZE;
                                break;
                        }
                        ctx->len = 0;
                }

                l = min(len, AES_BLOCK_SIZE - ctx->len);

                if (l <= AES_BLOCK_SIZE) {
                        crypto_xor(ctx->dg + ctx->len, p, l);
                        ctx->len += l;
                        len -= l;
                        p += l;
                }
        }

        return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

        mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1, 0);

        memcpy(out, ctx->dg, AES_BLOCK_SIZE);

        return 0;
}

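/*
 * CMAC/XCBC finalization: an incomplete final block is padded with a single
 * one bit (XORing 0x80 at the right offset; the zero pad bytes are implicit)
 * and masked with the second subkey, while a complete final block is masked
 * with the first. One last CBC-MAC step then produces the tag.
 */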
static int cmac_final(struct shash_desc *desc, u8 *out)
{
        struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
        u8 *consts = tctx->consts;

        if (ctx->len != AES_BLOCK_SIZE) {
                ctx->dg[ctx->len] ^= 0x80;
                consts += AES_BLOCK_SIZE;
        }

        mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

        memcpy(out, ctx->dg, AES_BLOCK_SIZE);

        return 0;
}

static struct shash_alg mac_algs[] = { {
        .base.cra_name          = "cmac(aes)",
        .base.cra_driver_name   = "cmac-aes-" MODE,
        .base.cra_priority      = PRIO,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct mac_tfm_ctx) +
                                  2 * AES_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = AES_BLOCK_SIZE,
        .init                   = mac_init,
        .update                 = mac_update,
        .final                  = cmac_final,
        .setkey                 = cmac_setkey,
        .descsize               = sizeof(struct mac_desc_ctx),
}, {
        .base.cra_name          = "xcbc(aes)",
        .base.cra_driver_name   = "xcbc-aes-" MODE,
        .base.cra_priority      = PRIO,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = AES_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct mac_tfm_ctx) +
                                  2 * AES_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = AES_BLOCK_SIZE,
        .init                   = mac_init,
        .update                 = mac_update,
        .final                  = cmac_final,
        .setkey                 = xcbc_setkey,
        .descsize               = sizeof(struct mac_desc_ctx),
}, {
        .base.cra_name          = "cbcmac(aes)",
        .base.cra_driver_name   = "cbcmac-aes-" MODE,
        .base.cra_priority      = PRIO,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct mac_tfm_ctx),
        .base.cra_module        = THIS_MODULE,

        .digestsize             = AES_BLOCK_SIZE,
        .init                   = mac_init,
        .update                 = mac_update,
        .final                  = cbcmac_final,
        .setkey                 = cbcmac_setkey,
        .descsize               = sizeof(struct mac_desc_ctx),
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
                if (aes_simd_algs[i])
                        simd_skcipher_free(aes_simd_algs[i]);

        crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

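/*
 * Register the internal algorithms first, then wrap each CRYPTO_ALG_INTERNAL
 * entry with simd_skcipher_create_compat(), which creates the user-visible
 * "ecb(aes)", "cbc(aes)", ... instances (the "__" prefix stripped off the
 * names) that fall back to an asynchronous helper when SIMD cannot be used
 * in the calling context.
 */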
static int __init aes_init(void)
{
        struct simd_skcipher_alg *simd;
        const char *basename;
        const char *algname;
        const char *drvname;
        int err;
        int i;

        err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        if (err)
                return err;

        err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
        if (err)
                goto unregister_ciphers;

        for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
                if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
                        continue;

                algname = aes_algs[i].base.cra_name + 2;
                drvname = aes_algs[i].base.cra_driver_name + 2;
                basename = aes_algs[i].base.cra_driver_name;
                simd = simd_skcipher_create_compat(algname, drvname, basename);
                err = PTR_ERR(simd);
                if (IS_ERR(simd))
                        goto unregister_simds;

                aes_simd_algs[i] = simd;
        }

        return 0;

unregister_simds:
        aes_exit();
        return err;
unregister_ciphers:
        crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
        return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);