linux/arch/arm64/crypto/aes-ce-ccm-glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}

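/* low-level CCM primitives, implemented in assembly in aes-ce-ccm-core.S */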
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
                                     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
                                   u32 const rk[], u32 rounds, u8 mac[],
                                   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
                                   u32 const rk[], u32 rounds, u8 mac[],
                                   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
                                 u32 rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
                      unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);

        return ce_aes_expandkey(ctx, in_key, key_len);
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
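        /*
         * CCM allows even tag lengths from 4 to 16 bytes; the upper bound is
         * enforced by .maxauthsize (AES_BLOCK_SIZE) in the AEAD definition
         * below.
         */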
        if ((authsize & 1) || authsize < 4)
                return -EINVAL;
        return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
        u32 l = req->iv[0] + 1;

        /* verify that CCM dimension 'L' is set correctly in the IV */
        if (l < 2 || l > 8)
                return -EINVAL;

        /* verify that msglen can in fact be represented in L bytes */
        if (l < 4 && msglen >> (8 * l))
                return -EOVERFLOW;

        /*
         * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
         * uses a u32 type to represent msglen so the top 4 bytes are always 0.
         */
        n[0] = 0;
        n[1] = cpu_to_be32(msglen);

        memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

        /*
         * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
         * - bits 0..2  : max # of bytes required to represent msglen, minus 1
         *                (already set by caller)
         * - bits 3..5  : size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
         * - bit 6      : indicates presence of authenticate-only data
         */
        maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
        if (req->assoclen)
                maciv[0] |= 0x40;

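        /*
         * Zero the counter portion of the IV so it can double as counter
         * block A0 for the CTR mode pass.
         */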
        memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
        return 0;
}

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
                           u32 abytes, u32 *macp)
{
        if (crypto_simd_usable()) {
                kernel_neon_begin();
                ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
                                     num_rounds(key));
                kernel_neon_end();
        } else {
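                /*
                 * Scalar fallback: fold the data into the CBC-MAC one block
                 * at a time, first topping up any partially filled block.
                 */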
                if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
                        int added = min(abytes, AES_BLOCK_SIZE - *macp);

                        crypto_xor(&mac[*macp], in, added);

                        *macp += added;
                        in += added;
                        abytes -= added;
                }

                while (abytes >= AES_BLOCK_SIZE) {
                        aes_encrypt(key, mac, mac);
                        crypto_xor(mac, in, AES_BLOCK_SIZE);

                        in += AES_BLOCK_SIZE;
                        abytes -= AES_BLOCK_SIZE;
                }

                if (abytes > 0) {
                        aes_encrypt(key, mac, mac);
                        crypto_xor(mac, in, abytes);
                        *macp = abytes;
                }
        }
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct __packed { __be16 l; __be32 h; u16 len; } ltag;
        struct scatter_walk walk;
        u32 len = req->assoclen;
        u32 macp = 0;

        /* prepend the AAD with a length tag */
        if (len < 0xff00) {
                ltag.l = cpu_to_be16(len);
                ltag.len = 2;
        } else {
                ltag.l = cpu_to_be16(0xfffe);
                put_unaligned_be32(len, &ltag.h);
                ltag.len = 6;
        }

        ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
        scatterwalk_start(&walk, req->src);

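        /* walk the associated data scatterlist and fold it into the CBC-MAC */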
        do {
                u32 n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) {
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }
                p = scatterwalk_map(&walk);
                ccm_update_mac(ctx, mac, p, n, &macp);
                len -= n;

                scatterwalk_unmap(p);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
        } while (len);
}

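/*
 * Scalar fallback used when the NEON unit is unavailable: performs the CTR
 * encryption/decryption and the CBC-MAC update block by block using the
 * generic AES library routines.
 */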
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
                              struct crypto_aes_ctx *ctx, bool enc)
{
        u8 buf[AES_BLOCK_SIZE];
        int err = 0;

        while (walk->nbytes) {
                int blocks = walk->nbytes / AES_BLOCK_SIZE;
                u32 tail = walk->nbytes % AES_BLOCK_SIZE;
                u8 *dst = walk->dst.virt.addr;
                u8 *src = walk->src.virt.addr;
                u32 nbytes = walk->nbytes;

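                /*
                 * The final chunk of the walk may end in a partial block;
                 * process it in this pass instead of handing it back.
                 */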
                if (nbytes == walk->total && tail > 0) {
                        blocks++;
                        tail = 0;
                }

                do {
                        u32 bsize = AES_BLOCK_SIZE;

                        if (nbytes < AES_BLOCK_SIZE)
                                bsize = nbytes;

                        crypto_inc(walk->iv, AES_BLOCK_SIZE);
                        aes_encrypt(ctx, buf, walk->iv);
                        aes_encrypt(ctx, mac, mac);
                        if (enc)
                                crypto_xor(mac, src, bsize);
                        crypto_xor_cpy(dst, src, buf, bsize);
                        if (!enc)
                                crypto_xor(mac, dst, bsize);
                        dst += bsize;
                        src += bsize;
                        nbytes -= bsize;
                } while (--blocks);

                err = skcipher_walk_done(walk, tail);
        }

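        /* finalize: run the last CBC-MAC round and encrypt the MAC with counter block 0 */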
        if (!err) {
                aes_encrypt(ctx, buf, iv0);
                aes_encrypt(ctx, mac, mac);
                crypto_xor(mac, buf, AES_BLOCK_SIZE);
        }
        return err;
}

static int ccm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct skcipher_walk walk;
        u8 __aligned(8) mac[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
        u32 len = req->cryptlen;
        int err;

        err = ccm_init_mac(req, mac, len);
        if (err)
                return err;

        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac);

        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

        err = skcipher_walk_aead_encrypt(&walk, req, false);

        if (crypto_simd_usable()) {
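                /*
                 * NEON path: let the CE assembly process whole blocks,
                 * handing any partial tail block back to the walk unless
                 * this is the final chunk.
                 */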
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

                        if (walk.nbytes == walk.total)
                                tail = 0;

                        kernel_neon_begin();
                        ce_aes_ccm_encrypt(walk.dst.virt.addr,
                                           walk.src.virt.addr,
                                           walk.nbytes - tail, ctx->key_enc,
                                           num_rounds(ctx), mac, walk.iv);
                        kernel_neon_end();

                        err = skcipher_walk_done(&walk, tail);
                }
                if (!err) {
                        kernel_neon_begin();
                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
                                         num_rounds(ctx));
                        kernel_neon_end();
                }
        } else {
                err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
        }
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
                                 crypto_aead_authsize(aead), 1);

        return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        unsigned int authsize = crypto_aead_authsize(aead);
        struct skcipher_walk walk;
        u8 __aligned(8) mac[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
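        /* on decryption, cryptlen covers both the ciphertext and the tag */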
        u32 len = req->cryptlen - authsize;
        int err;

        err = ccm_init_mac(req, mac, len);
        if (err)
                return err;

        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac);

        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

        err = skcipher_walk_aead_decrypt(&walk, req, false);

        if (crypto_simd_usable()) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

                        if (walk.nbytes == walk.total)
                                tail = 0;

                        kernel_neon_begin();
                        ce_aes_ccm_decrypt(walk.dst.virt.addr,
                                           walk.src.virt.addr,
                                           walk.nbytes - tail, ctx->key_enc,
                                           num_rounds(ctx), mac, walk.iv);
                        kernel_neon_end();

                        err = skcipher_walk_done(&walk, tail);
                }
                if (!err) {
                        kernel_neon_begin();
                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
                                         num_rounds(ctx));
                        kernel_neon_end();
                }
        } else {
                err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
        }

        if (err)
                return err;

        /* compare calculated auth tag with the stored one */
        scatterwalk_map_and_copy(buf, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);

        if (crypto_memneq(mac, buf, authsize))
                return -EBADMSG;
        return 0;
}

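/*
 * CCM is a stream mode built on CTR, so the AEAD is advertised with a
 * block size of one byte; chunksize reflects the AES block size of the
 * underlying CTR keystream.
 */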
static struct aead_alg ccm_aes_alg = {
        .base = {
                .cra_name               = "ccm(aes)",
                .cra_driver_name        = "ccm-aes-ce",
                .cra_priority           = 300,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .maxauthsize    = AES_BLOCK_SIZE,
        .setkey         = ccm_setkey,
        .setauthsize    = ccm_setauthsize,
        .encrypt        = ccm_encrypt,
        .decrypt        = ccm_decrypt,
};

static int __init aes_mod_init(void)
{
        if (!cpu_have_named_feature(AES))
                return -ENODEV;
        return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
        crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");