linux/arch/arm64/crypto/aes-ce-ccm-glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
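        /* e.g. a 32 byte (256 bit) key expands to 6 + 32/4 = 14 rounds */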
        return 6 + ctx->key_length / 4;
}

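/*
 * Low-level CCM primitives implemented with the ARMv8 Crypto Extensions in
 * the accompanying aes-ce-ccm-core.S: AAD authentication, combined CTR
 * encryption/decryption with MAC update, and the final MAC encryption.
 */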
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
                                     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
                                   u32 const rk[], u32 rounds, u8 mac[],
                                   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
                                   u32 const rk[], u32 rounds, u8 mac[],
                                   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
                                 u32 rounds);

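/* expand the AES key schedule using the Crypto Extensions helper */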
static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
                      unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
        int ret;

        ret = ce_aes_expandkey(ctx, in_key, key_len);
        if (!ret)
                return 0;

        tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}

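/*
 * CCM allows even tag sizes from 4 to 16 bytes; the upper bound is enforced
 * by the AEAD core via .maxauthsize below.
 */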
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        if ((authsize & 1) || authsize < 4)
                return -EINVAL;
        return 0;
}

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
        u32 l = req->iv[0] + 1;

        /* verify that CCM dimension 'L' is set correctly in the IV */
        if (l < 2 || l > 8)
                return -EINVAL;

        /* verify that msglen can in fact be represented in L bytes */
        if (l < 4 && msglen >> (8 * l))
                return -EOVERFLOW;

        /*
         * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
         * uses a u32 type to represent msglen so the top 4 bytes are always 0.
         */
        n[0] = 0;
        n[1] = cpu_to_be32(msglen);

        memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

        /*
         * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
         * - bits 0..2  : max # of bytes required to represent msglen, minus 1
         *                (already set by caller)
         * - bits 3..5  : size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
         * - bit 6      : indicates presence of authenticate-only data
         */
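        /*
         * e.g. L == 2, an 8 byte tag and AAD present give
         * byte 0 == 0x01 | ((8 - 2) << 2) | 0x40 == 0x59
         */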
        maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
        if (req->assoclen)
                maciv[0] |= 0x40;

        memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
        return 0;
}

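/*
 * Fold 'abytes' bytes of input into the CBC-MAC: use the NEON helper when
 * SIMD is usable, otherwise fall back to the scalar AES library routines,
 * tracking the fill level of the current MAC block in *macp.
 */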
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
                           u32 abytes, u32 *macp)
{
        if (crypto_simd_usable()) {
                kernel_neon_begin();
                ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
                                     num_rounds(key));
                kernel_neon_end();
        } else {
                if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
                        int added = min(abytes, AES_BLOCK_SIZE - *macp);

                        crypto_xor(&mac[*macp], in, added);

                        *macp += added;
                        in += added;
                        abytes -= added;
                }

                while (abytes >= AES_BLOCK_SIZE) {
                        aes_encrypt(key, mac, mac);
                        crypto_xor(mac, in, AES_BLOCK_SIZE);

                        in += AES_BLOCK_SIZE;
                        abytes -= AES_BLOCK_SIZE;
                }

                if (abytes > 0) {
                        aes_encrypt(key, mac, mac);
                        crypto_xor(mac, in, abytes);
                        *macp = abytes;
                }
        }
}

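/*
 * MAC the associated data, prefixed with its length encoded as per the CCM
 * spec: a 2 byte big-endian count for lengths below 0xff00, otherwise the
 * 0xfffe marker followed by a 4 byte count.
 */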
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct __packed { __be16 l; __be32 h; u16 len; } ltag;
        struct scatter_walk walk;
        u32 len = req->assoclen;
        u32 macp = 0;

        /* prepend the AAD with a length tag */
        if (len < 0xff00) {
                ltag.l = cpu_to_be16(len);
                ltag.len = 2;
        } else {
                ltag.l = cpu_to_be16(0xfffe);
                put_unaligned_be32(len, &ltag.h);
                ltag.len = 6;
        }

        ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
        scatterwalk_start(&walk, req->src);

        do {
                u32 n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) {
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }
                p = scatterwalk_map(&walk);
                ccm_update_mac(ctx, mac, p, n, &macp);
                len -= n;

                scatterwalk_unmap(p);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
        } while (len);
}

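/*
 * Scalar CTR encryption/decryption plus CBC-MAC update, used when the NEON
 * unit may not be used (crypto_simd_usable() returned false). iv0 holds the
 * original counter block, needed to encrypt the final MAC.
 */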
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
                              struct crypto_aes_ctx *ctx, bool enc)
{
        u8 buf[AES_BLOCK_SIZE];
        int err = 0;

        while (walk->nbytes) {
                int blocks = walk->nbytes / AES_BLOCK_SIZE;
                u32 tail = walk->nbytes % AES_BLOCK_SIZE;
                u8 *dst = walk->dst.virt.addr;
                u8 *src = walk->src.virt.addr;
                u32 nbytes = walk->nbytes;

                if (nbytes == walk->total && tail > 0) {
                        blocks++;
                        tail = 0;
                }

                do {
                        u32 bsize = AES_BLOCK_SIZE;

                        if (nbytes < AES_BLOCK_SIZE)
                                bsize = nbytes;

                        crypto_inc(walk->iv, AES_BLOCK_SIZE);
                        aes_encrypt(ctx, buf, walk->iv);
                        aes_encrypt(ctx, mac, mac);
                        if (enc)
                                crypto_xor(mac, src, bsize);
                        crypto_xor_cpy(dst, src, buf, bsize);
                        if (!enc)
                                crypto_xor(mac, dst, bsize);
                        dst += bsize;
                        src += bsize;
                        nbytes -= bsize;
                } while (--blocks);

                err = skcipher_walk_done(walk, tail);
        }

        if (!err) {
                aes_encrypt(ctx, buf, iv0);
                aes_encrypt(ctx, mac, mac);
                crypto_xor(mac, buf, AES_BLOCK_SIZE);
        }
        return err;
}

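/*
 * AEAD encryption: authenticate the AAD, CTR-encrypt the plaintext while
 * folding it into the CBC-MAC, then append the (possibly truncated)
 * authentication tag to the destination scatterlist.
 */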
static int ccm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct skcipher_walk walk;
        u8 __aligned(8) mac[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
        u32 len = req->cryptlen;
        int err;

        err = ccm_init_mac(req, mac, len);
        if (err)
                return err;

        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac);

        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

        err = skcipher_walk_aead_encrypt(&walk, req, false);

        if (crypto_simd_usable()) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

                        if (walk.nbytes == walk.total)
                                tail = 0;

                        kernel_neon_begin();
                        ce_aes_ccm_encrypt(walk.dst.virt.addr,
                                           walk.src.virt.addr,
                                           walk.nbytes - tail, ctx->key_enc,
                                           num_rounds(ctx), mac, walk.iv);
                        kernel_neon_end();

                        err = skcipher_walk_done(&walk, tail);
                }
                if (!err) {
                        kernel_neon_begin();
                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
                                         num_rounds(ctx));
                        kernel_neon_end();
                }
        } else {
                err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
        }
        if (err)
                return err;

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
                                 crypto_aead_authsize(aead), 1);

        return 0;
}

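/*
 * AEAD decryption: the last 'authsize' bytes of the source hold the expected
 * tag; recompute the MAC over the AAD and decrypted text and compare the two
 * in constant time.
 */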
static int ccm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
        unsigned int authsize = crypto_aead_authsize(aead);
        struct skcipher_walk walk;
        u8 __aligned(8) mac[AES_BLOCK_SIZE];
        u8 buf[AES_BLOCK_SIZE];
        u32 len = req->cryptlen - authsize;
        int err;

        err = ccm_init_mac(req, mac, len);
        if (err)
                return err;

        if (req->assoclen)
                ccm_calculate_auth_mac(req, mac);

        /* preserve the original iv for the final round */
        memcpy(buf, req->iv, AES_BLOCK_SIZE);

        err = skcipher_walk_aead_decrypt(&walk, req, false);

        if (crypto_simd_usable()) {
                while (walk.nbytes) {
                        u32 tail = walk.nbytes % AES_BLOCK_SIZE;

                        if (walk.nbytes == walk.total)
                                tail = 0;

                        kernel_neon_begin();
                        ce_aes_ccm_decrypt(walk.dst.virt.addr,
                                           walk.src.virt.addr,
                                           walk.nbytes - tail, ctx->key_enc,
                                           num_rounds(ctx), mac, walk.iv);
                        kernel_neon_end();

                        err = skcipher_walk_done(&walk, tail);
                }
                if (!err) {
                        kernel_neon_begin();
                        ce_aes_ccm_final(mac, buf, ctx->key_enc,
                                         num_rounds(ctx));
                        kernel_neon_end();
                }
        } else {
                err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
        }

        if (err)
                return err;

        /* compare calculated auth tag with the stored one */
        scatterwalk_map_and_copy(buf, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);

        if (crypto_memneq(mac, buf, authsize))
                return -EBADMSG;
        return 0;
}

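/* synchronous ccm(aes) AEAD backed by the Crypto Extensions routines above */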
static struct aead_alg ccm_aes_alg = {
        .base = {
                .cra_name               = "ccm(aes)",
                .cra_driver_name        = "ccm-aes-ce",
                .cra_priority           = 300,
                .cra_blocksize          = 1,
                .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
                .cra_module             = THIS_MODULE,
        },
        .ivsize         = AES_BLOCK_SIZE,
        .chunksize      = AES_BLOCK_SIZE,
        .maxauthsize    = AES_BLOCK_SIZE,
        .setkey         = ccm_setkey,
        .setauthsize    = ccm_setauthsize,
        .encrypt        = ccm_encrypt,
        .decrypt        = ccm_decrypt,
};

static int __init aes_mod_init(void)
{
        if (!cpu_have_named_feature(AES))
                return -ENODEV;
        return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
        crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");