linux/arch/arm64/crypto/aes-ce-glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-glue.c - core AES cipher using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
   7
   8#include <asm/neon.h>
   9#include <asm/simd.h>
  10#include <asm/unaligned.h>
  11#include <crypto/aes.h>
  12#include <crypto/internal/simd.h>
  13#include <linux/cpufeature.h>
  14#include <linux/crypto.h>
  15#include <linux/module.h>
  16
  17#include "aes-ce-setkey.h"

MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

struct aes_block {
        u8 b[AES_BLOCK_SIZE];
};

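/*
 * Single-block encrypt/decrypt primitives, implemented in assembly in
 * aes-ce-core.S using the ARMv8 AESE/AESMC and AESD/AESIMC instructions.
 * They use the NEON register file, so callers must bracket them with
 * kernel_neon_begin()/kernel_neon_end().
 */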
asmlinkage void __aes_ce_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
asmlinkage void __aes_ce_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

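/*
 * Key schedule helpers, also implemented in aes-ce-core.S: __aes_ce_sub()
 * applies the AES S-box to each byte of a 32-bit word (the SubWord step of
 * the key schedule), and __aes_ce_invert() applies the Inverse Mix Columns
 * transformation to a single round key.
 */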
asmlinkage u32 __aes_ce_sub(u32 l);
asmlinkage void __aes_ce_invert(struct aes_block *out,
                                const struct aes_block *in);

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}

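/*
 * The NEON register file cannot be used in every context (e.g. from hard
 * IRQ handlers), so when crypto_simd_usable() reports that SIMD is off
 * limits, fall back to the generic AES library routines instead of the
 * Crypto Extensions implementation.
 */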
static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        if (!crypto_simd_usable()) {
                aes_encrypt(ctx, dst, src);
                return;
        }

        kernel_neon_begin();
        __aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));
        kernel_neon_end();
}

static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        if (!crypto_simd_usable()) {
                aes_decrypt(ctx, dst, src);
                return;
        }

        kernel_neon_begin();
        __aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));
        kernel_neon_end();
}

int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
                     unsigned int key_len)
{
        /*
         * The AES key schedule round constants: rcon[i] is x^i in GF(2^8),
         * reduced modulo the AES polynomial x^8 + x^4 + x^3 + x + 1.
         */
        static u8 const rcon[] = {
                0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
        };

        u32 kwords = key_len / sizeof(u32);
        struct aes_block *key_enc, *key_dec;
        int i, j;

        if (key_len != AES_KEYSIZE_128 &&
            key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256)
                return -EINVAL;

        ctx->key_length = key_len;
        for (i = 0; i < kwords; i++)
                ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

        kernel_neon_begin();
        for (i = 0; i < sizeof(rcon); i++) {
                u32 *rki = ctx->key_enc + (i * kwords);
                u32 *rko = rki + kwords;

                rko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^ rcon[i] ^ rki[0];
                rko[1] = rko[0] ^ rki[1];
                rko[2] = rko[1] ^ rki[2];
                rko[3] = rko[2] ^ rki[3];

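                /*
                 * A 128-bit key uses all 10 iterations of this loop and
                 * produces 4 schedule words per iteration.  192- and
                 * 256-bit keys produce 6 and 8 words per iteration, so
                 * they fill their 52- and 60-word schedules sooner and
                 * break out of the loop early.
                 */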
                if (key_len == AES_KEYSIZE_192) {
                        if (i >= 7)
                                break;
                        rko[4] = rko[3] ^ rki[4];
                        rko[5] = rko[4] ^ rki[5];
                } else if (key_len == AES_KEYSIZE_256) {
                        if (i >= 6)
                                break;
                        rko[4] = __aes_ce_sub(rko[3]) ^ rki[4];
                        rko[5] = rko[4] ^ rki[5];
                        rko[6] = rko[5] ^ rki[6];
                        rko[7] = rko[6] ^ rki[7];
                }
        }

        /*
         * Generate the decryption keys for the Equivalent Inverse Cipher.
         * This involves reversing the order of the round keys, and applying
         * the Inverse Mix Columns transformation on all but the first and
         * the last one.
         */
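        /*
         * For a 128-bit key (10 rounds), for instance: key_dec[0] is
         * key_enc[10], key_dec[1..9] are the Inverse Mix Columns images
         * of key_enc[9..1], and key_dec[10] is key_enc[0].
         */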
        key_enc = (struct aes_block *)ctx->key_enc;
        key_dec = (struct aes_block *)ctx->key_dec;
        j = num_rounds(ctx);

        key_dec[0] = key_enc[j];
        for (i = 1, j--; j > 0; i++, j--)
                __aes_ce_invert(key_dec + i, key_enc + j);
        key_dec[i] = key_enc[0];

        kernel_neon_end();
        return 0;
}
EXPORT_SYMBOL(ce_aes_expandkey);

int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                  unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        return ce_aes_expandkey(ctx, in_key, key_len);
}
EXPORT_SYMBOL(ce_aes_setkey);

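/*
 * Register under the generic name "aes" with priority 250, so that this
 * implementation wins over the generic C implementation (priority 100)
 * whenever the CPU advertises the AES instructions.
 */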
static struct crypto_alg aes_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-ce",
        .cra_priority           = 250,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_module             = THIS_MODULE,
        .cra_cipher = {
                .cia_min_keysize        = AES_MIN_KEY_SIZE,
                .cia_max_keysize        = AES_MAX_KEY_SIZE,
                .cia_setkey             = ce_aes_setkey,
                .cia_encrypt            = aes_cipher_encrypt,
                .cia_decrypt            = aes_cipher_decrypt
        }
};

static int __init aes_mod_init(void)
{
        return crypto_register_alg(&aes_alg);
}

static void __exit aes_mod_exit(void)
{
        crypto_unregister_alg(&aes_alg);
}

/*
 * Autoload this module only on CPUs whose feature registers advertise the
 * AES instructions, and use aes_mod_init() as the module init function.
 */
module_cpu_feature_match(AES, aes_mod_init);
module_exit(aes_mod_exit);
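
/*
 * Usage sketch (illustrative only, not part of this driver): another kernel
 * module could exercise this single-block cipher through the generic crypto
 * API.  The all-zero key and plaintext below are placeholder values.
 *
 *      struct crypto_cipher *tfm;
 *      u8 key[AES_KEYSIZE_128] = {};   // placeholder key
 *      u8 in[AES_BLOCK_SIZE] = {};     // placeholder plaintext block
 *      u8 out[AES_BLOCK_SIZE];
 *
 *      tfm = crypto_alloc_cipher("aes", 0, 0); // resolves to aes-ce if loaded
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      crypto_cipher_setkey(tfm, key, sizeof(key));
 *      crypto_cipher_encrypt_one(tfm, out, in); // encrypt one 16-byte block
 *      crypto_free_cipher(tfm);
 */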