linux/arch/arm64/crypto/aes-ce-ccm-glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

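/*
 * Low-level CCM primitives, implemented in NEON assembly (see
 * aes-ce-ccm-core.S in this directory). They may only be invoked with
 * the NEON unit enabled, i.e., inside a kernel_neon_begin() /
 * kernel_neon_end() pair.
 */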
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

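/*
 * Generic scalar AES block encryption (aes-cipher-core.S), used as a
 * fallback when the NEON is not usable in the current context.
 */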
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
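	/*
	 * CCM tag lengths must be even and at least 4 bytes; the upper
	 * bound of 16 bytes is enforced by the crypto API based on
	 * .maxauthsize below.
	 */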
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/*
	 * req->iv[0] is the flags byte of the CCM counter block and
	 * encodes L - 1 in its low 3 bits; verify that the CCM length
	 * dimension 'L' lies in the valid range of 2 to 8 bytes.
	 */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
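	/*
	 * Note that since authsize is guaranteed to be even, the
	 * expression (authsize - 2) << 2 below is equivalent to the
	 * spec's ((authsize - 2) / 2) << 3 for the tag length field.
	 */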
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
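		/*
		 * Scalar CBC-MAC fallback. Bytes are XORed into the MAC
		 * block at offset *macp; the block cipher is applied
		 * lazily, only once a full block has been accumulated.
		 */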
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/*
	 * Prepend the AAD with a length tag: lengths below 0xff00 are
	 * encoded in two bytes, larger ones as the marker 0xfffe followed
	 * by the length in four bytes (per RFC 3610; the eight byte
	 * encoding cannot occur here since len is a u32).
	 */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

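	/* walk the AAD scatterlist and fold each chunk into the CBC-MAC */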
	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

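/*
 * Scalar fallback for the bulk data: performs the CTR encryption and
 * the CBC-MAC computation in tandem using the generic AES core.
 */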
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

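		/*
		 * If this is the final chunk of the walk, fold a partial
		 * tail block into this pass: CTR mode simply truncates
		 * the last keystream block, so nothing is carried over.
		 */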
		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

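	/*
	 * Finalize the tag: encrypt the last CBC-MAC block and XOR it
	 * with S0, the keystream block derived from the original
	 * counter block iv0 (per RFC 3610, U = T XOR first-M-bytes(S0)).
	 */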
	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

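			/*
			 * Only pass a partial trailing block to the
			 * NEON core on the final step of the walk;
			 * otherwise, defer the tail bytes to the next
			 * walk step.
			 */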
			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
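	/* on decryption, req->cryptlen covers both ciphertext and tag */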
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

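			/*
			 * As in ccm_encrypt(): only the final call may
			 * pass a partial block to the NEON core.
			 */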
			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

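/*
 * CCM is a stream mode, hence the cra_blocksize of 1; chunksize
 * advertises the underlying AES block granularity to the walk code.
 */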
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");