linux/arch/arm64/crypto/ghash-ce-glue.c
/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE        16
#define GHASH_DIGEST_SIZE       16
#define GCM_IV_SIZE             12

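/*
 * The hash key is kept in two forms: a/b hold the pre-processed key used
 * by the PMULL assembly routines (see __ghash_setkey()), while k holds the
 * raw key for the generic gf128mul fallback used when the NEON unit is not
 * available.
 */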
struct ghash_key {
        u64 a;
        u64 b;
        be128 k;
};

struct ghash_desc_ctx {
        u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
        u8 buf[GHASH_BLOCK_SIZE];
        u32 count;
};

struct gcm_aes_ctx {
        struct crypto_aes_ctx   aes_key;
        struct ghash_key        ghash_key;
};

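/*
 * The GHASH core is implemented in assembly: pmull_ghash_update_p64 uses
 * the 64x64 -> 128 bit PMULL instruction provided by the ARMv8 Crypto
 * Extensions, while pmull_ghash_update_p8 is a fallback built on the 8-bit
 * polynomial multiplies available in baseline ASIMD. Module init points
 * the pmull_ghash_update function pointer at whichever variant the CPU
 * supports.
 */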
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
                                       struct ghash_key const *k,
                                       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
                                      struct ghash_key const *k,
                                      const char *head);

static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
                                  struct ghash_key const *k,
                                  const char *head);

asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
                                  const u8 src[], struct ghash_key const *k,
                                  u8 ctr[], int rounds, u8 ks[]);

asmlinkage void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[],
                                  const u8 src[], struct ghash_key const *k,
                                  u8 ctr[], int rounds);

asmlinkage void pmull_gcm_encrypt_block(u8 dst[], u8 const src[],
                                        u32 const rk[], int rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ghash_init(struct shash_desc *desc)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

        *ctx = (struct ghash_desc_ctx){};
        return 0;
}

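/*
 * Hash 'blocks' full GHASH blocks. If the NEON unit may be used, this is
 * done by the selected PMULL routine inside a kernel_neon_begin()/end()
 * section; otherwise the generic gf128mul_lle() implementation is used.
 * 'head' optionally points to one extra block to be consumed first, and
 * the running digest is kept as two native-endian 64-bit words in dg[].
 */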
static void ghash_do_update(int blocks, u64 dg[], const char *src,
                            struct ghash_key *key, const char *head)
{
        if (likely(may_use_simd())) {
                kernel_neon_begin();
                pmull_ghash_update(blocks, dg, src, key, head);
                kernel_neon_end();
        } else {
                be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

                do {
                        const u8 *in = src;

                        if (head) {
                                in = head;
                                blocks++;
                                head = NULL;
                        } else {
                                src += GHASH_BLOCK_SIZE;
                        }

                        crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
                        gf128mul_lle(&dst, &key->k);
                } while (--blocks);

                dg[0] = be64_to_cpu(dst.b);
                dg[1] = be64_to_cpu(dst.a);
        }
}

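/*
 * Standard shash buffering: bytes are collected in ctx->buf until a full
 * 16-byte block is available; whole blocks (plus a buffered head block,
 * if any) are then passed to ghash_do_update() in a single call.
 */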
static int ghash_update(struct shash_desc *desc, const u8 *src,
                        unsigned int len)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

        ctx->count += len;

        if ((partial + len) >= GHASH_BLOCK_SIZE) {
                struct ghash_key *key = crypto_shash_ctx(desc->tfm);
                int blocks;

                if (partial) {
                        int p = GHASH_BLOCK_SIZE - partial;

                        memcpy(ctx->buf + partial, src, p);
                        src += p;
                        len -= p;
                }

                blocks = len / GHASH_BLOCK_SIZE;
                len %= GHASH_BLOCK_SIZE;

                ghash_do_update(blocks, ctx->digest, src, key,
                                partial ? ctx->buf : NULL);

                src += blocks * GHASH_BLOCK_SIZE;
                partial = 0;
        }
        if (len)
                memcpy(ctx->buf + partial, src, len);
        return 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

        if (partial) {
                struct ghash_key *key = crypto_shash_ctx(desc->tfm);

                memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

                ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
        }
        put_unaligned_be64(ctx->digest[1], dst);
        put_unaligned_be64(ctx->digest[0], dst + 8);

        *ctx = (struct ghash_desc_ctx){};
        return 0;
}

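/*
 * Precompute the hash key in the form the PMULL routines expect: H
 * multiplied by 'x' in GF(2^128), with the reduction constant 0xc2 << 56
 * folded into the high word when a bit is shifted out. The unmodified key
 * is also kept in ->k for the non-NEON gf128mul_lle() fallback.
 */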
static int __ghash_setkey(struct ghash_key *key,
                          const u8 *inkey, unsigned int keylen)
{
        u64 a, b;

        /* needed for the fallback */
        memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);

        /* perform multiplication by 'x' in GF(2^128) */
        b = get_unaligned_be64(inkey);
        a = get_unaligned_be64(inkey + 8);

        key->a = (a << 1) | (b >> 63);
        key->b = (b << 1) | (a >> 63);

        if (b >> 63)
                key->b ^= 0xc200000000000000UL;

        return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
                        const u8 *inkey, unsigned int keylen)
{
        struct ghash_key *key = crypto_shash_ctx(tfm);

        if (keylen != GHASH_BLOCK_SIZE) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        return __ghash_setkey(key, inkey, keylen);
}

static struct shash_alg ghash_alg = {
        .base.cra_name          = "ghash",
        .base.cra_driver_name   = "ghash-ce",
        .base.cra_priority      = 200,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = GHASH_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct ghash_key),
        .base.cra_module        = THIS_MODULE,

        .digestsize             = GHASH_DIGEST_SIZE,
        .init                   = ghash_init,
        .update                 = ghash_update,
        .final                  = ghash_final,
        .setkey                 = ghash_setkey,
        .descsize               = sizeof(struct ghash_desc_ctx),
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}

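/*
 * Expand the AES key and derive the GHASH key H by encrypting an all-zero
 * block, as specified for GCM, then hand it to __ghash_setkey().
 */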
static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
                      unsigned int keylen)
{
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
        u8 key[GHASH_BLOCK_SIZE];
        int ret;

        ret = crypto_aes_expand_key(&ctx->aes_key, inkey, keylen);
        if (ret) {
                tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        __aes_arm64_encrypt(ctx->aes_key.key_enc, key, (u8[AES_BLOCK_SIZE]){},
                            num_rounds(&ctx->aes_key));

        return __ghash_setkey(&ctx->ghash_key, key, sizeof(key));
}

static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12 ... 16:
                break;
        default:
                return -EINVAL;
        }
        return 0;
}

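/*
 * Fold a chunk of associated data into the MAC, buffering partial blocks
 * in buf/buf_count across calls so that ghash_do_update() is only ever
 * fed whole 16-byte blocks.
 */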
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
                           int *buf_count, struct gcm_aes_ctx *ctx)
{
        if (*buf_count > 0) {
                int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

                memcpy(&buf[*buf_count], src, buf_added);

                *buf_count += buf_added;
                src += buf_added;
                count -= buf_added;
        }

        if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
                int blocks = count / GHASH_BLOCK_SIZE;

                ghash_do_update(blocks, dg, src, &ctx->ghash_key,
                                *buf_count ? buf : NULL);

                src += blocks * GHASH_BLOCK_SIZE;
                count %= GHASH_BLOCK_SIZE;
                *buf_count = 0;
        }

        if (count > 0) {
                memcpy(buf, src, count);
                *buf_count = count;
        }
}

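/*
 * Walk the associated data in req->src and hash it via gcm_update_mac();
 * a trailing partial block is zero padded before the final GHASH update.
 */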
static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        u8 buf[GHASH_BLOCK_SIZE];
        struct scatter_walk walk;
        u32 len = req->assoclen;
        int buf_count = 0;

        scatterwalk_start(&walk, req->src);

        do {
                u32 n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) {
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }
                p = scatterwalk_map(&walk);

                gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
                len -= n;

                scatterwalk_unmap(p);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
        } while (len);

        if (buf_count) {
                memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
                ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);
        }
}

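/*
 * Complete the GHASH computation by hashing the lengths block (the bit
 * lengths of the associated data and the ciphertext) and XOR the result
 * into 'tag', which the callers have already set to the encrypted initial
 * counter block E(K, J0).
 */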
static void gcm_final(struct aead_request *req, struct gcm_aes_ctx *ctx,
                      u64 dg[], u8 tag[], int cryptlen)
{
        u8 mac[AES_BLOCK_SIZE];
        u128 lengths;

        lengths.a = cpu_to_be64(req->assoclen * 8);
        lengths.b = cpu_to_be64(cryptlen * 8);

        ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL);

        put_unaligned_be64(dg[1], mac);
        put_unaligned_be64(dg[0], mac + 8);

        crypto_xor(tag, mac, AES_BLOCK_SIZE);
}

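/*
 * Encrypt and authenticate along one of two paths: if the NEON unit may
 * be used, the bulk of the data is handled by the pmull_gcm_encrypt()
 * asm routine, which encrypts and hashes full blocks in one pass;
 * otherwise a scalar fallback generates the CTR keystream with
 * __aes_arm64_encrypt() and hashes the resulting ciphertext with
 * ghash_do_update(). Any partial final block is XORed with the keystream
 * left in 'ks' and zero padded for its GHASH update, after which
 * gcm_final() produces the tag that is appended to the destination.
 */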
static int gcm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct skcipher_walk walk;
        u8 iv[AES_BLOCK_SIZE];
        u8 ks[AES_BLOCK_SIZE];
        u8 tag[AES_BLOCK_SIZE];
        u64 dg[2] = {};
        int err;

        if (req->assoclen)
                gcm_calculate_auth_mac(req, dg);

        memcpy(iv, req->iv, GCM_IV_SIZE);
        put_unaligned_be32(1, iv + GCM_IV_SIZE);

        if (likely(may_use_simd())) {
                kernel_neon_begin();

                pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc,
                                        num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);
                pmull_gcm_encrypt_block(ks, iv, NULL,
                                        num_rounds(&ctx->aes_key));
                put_unaligned_be32(3, iv + GCM_IV_SIZE);

                err = skcipher_walk_aead_encrypt(&walk, req, true);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;

                        pmull_gcm_encrypt(blocks, dg, walk.dst.virt.addr,
                                          walk.src.virt.addr, &ctx->ghash_key,
                                          iv, num_rounds(&ctx->aes_key), ks);

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                kernel_neon_end();
        } else {
                __aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv,
                                    num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);

                err = skcipher_walk_aead_encrypt(&walk, req, true);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;
                        u8 *dst = walk.dst.virt.addr;
                        u8 *src = walk.src.virt.addr;

                        do {
                                __aes_arm64_encrypt(ctx->aes_key.key_enc,
                                                    ks, iv,
                                                    num_rounds(&ctx->aes_key));
                                crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE);
                                crypto_inc(iv, AES_BLOCK_SIZE);

                                dst += AES_BLOCK_SIZE;
                                src += AES_BLOCK_SIZE;
                        } while (--blocks > 0);

                        ghash_do_update(walk.nbytes / AES_BLOCK_SIZE, dg,
                                        walk.dst.virt.addr, &ctx->ghash_key,
                                        NULL);

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                if (walk.nbytes)
                        __aes_arm64_encrypt(ctx->aes_key.key_enc, ks, iv,
                                            num_rounds(&ctx->aes_key));
        }

        /* handle the tail */
        if (walk.nbytes) {
                u8 buf[GHASH_BLOCK_SIZE];

                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, ks,
                               walk.nbytes);

                memcpy(buf, walk.dst.virt.addr, walk.nbytes);
                memset(buf + walk.nbytes, 0, GHASH_BLOCK_SIZE - walk.nbytes);
                ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);

                err = skcipher_walk_done(&walk, 0);
        }

        if (err)
                return err;

        gcm_final(req, ctx, dg, tag, req->cryptlen);

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
                                 crypto_aead_authsize(aead), 1);

        return 0;
}

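/*
 * Decryption mirrors gcm_encrypt(), except that the ciphertext is hashed
 * before it is decrypted, and the computed tag is compared against the
 * tag stored at the end of the source buffer using crypto_memneq() to
 * avoid leaking timing information. -EBADMSG is returned on mismatch.
 */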
static int gcm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        unsigned int authsize = crypto_aead_authsize(aead);
        struct skcipher_walk walk;
        u8 iv[AES_BLOCK_SIZE];
        u8 tag[AES_BLOCK_SIZE];
        u8 buf[GHASH_BLOCK_SIZE];
        u64 dg[2] = {};
        int err;

        if (req->assoclen)
                gcm_calculate_auth_mac(req, dg);

        memcpy(iv, req->iv, GCM_IV_SIZE);
        put_unaligned_be32(1, iv + GCM_IV_SIZE);

        if (likely(may_use_simd())) {
                kernel_neon_begin();

                pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc,
                                        num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);

                err = skcipher_walk_aead_decrypt(&walk, req, true);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;

                        pmull_gcm_decrypt(blocks, dg, walk.dst.virt.addr,
                                          walk.src.virt.addr, &ctx->ghash_key,
                                          iv, num_rounds(&ctx->aes_key));

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                if (walk.nbytes)
                        pmull_gcm_encrypt_block(iv, iv, NULL,
                                                num_rounds(&ctx->aes_key));

                kernel_neon_end();
        } else {
                __aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv,
                                    num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);

                err = skcipher_walk_aead_decrypt(&walk, req, true);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;
                        u8 *dst = walk.dst.virt.addr;
                        u8 *src = walk.src.virt.addr;

                        ghash_do_update(blocks, dg, walk.src.virt.addr,
                                        &ctx->ghash_key, NULL);

                        do {
                                __aes_arm64_encrypt(ctx->aes_key.key_enc,
                                                    buf, iv,
                                                    num_rounds(&ctx->aes_key));
                                crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
                                crypto_inc(iv, AES_BLOCK_SIZE);

                                dst += AES_BLOCK_SIZE;
                                src += AES_BLOCK_SIZE;
                        } while (--blocks > 0);

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                if (walk.nbytes)
                        __aes_arm64_encrypt(ctx->aes_key.key_enc, iv, iv,
                                            num_rounds(&ctx->aes_key));
        }

        /* handle the tail */
        if (walk.nbytes) {
                memcpy(buf, walk.src.virt.addr, walk.nbytes);
                memset(buf + walk.nbytes, 0, GHASH_BLOCK_SIZE - walk.nbytes);
                ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);

                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv,
                               walk.nbytes);

                err = skcipher_walk_done(&walk, 0);
        }

        if (err)
                return err;

        gcm_final(req, ctx, dg, tag, req->cryptlen - authsize);

        /* compare calculated auth tag with the stored one */
        scatterwalk_map_and_copy(buf, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);

        if (crypto_memneq(tag, buf, authsize))
                return -EBADMSG;
        return 0;
}

static struct aead_alg gcm_aes_alg = {
        .ivsize                 = GCM_IV_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .maxauthsize            = AES_BLOCK_SIZE,
        .setkey                 = gcm_setkey,
        .setauthsize            = gcm_setauthsize,
        .encrypt                = gcm_encrypt,
        .decrypt                = gcm_decrypt,

        .base.cra_name          = "gcm(aes)",
        .base.cra_driver_name   = "gcm-aes-ce",
        .base.cra_priority      = 300,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct gcm_aes_ctx),
        .base.cra_module        = THIS_MODULE,
};

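/*
 * The ghash shash is registered whenever ASIMD is present (falling back
 * to the p8 routine if PMULL is missing); the gcm(aes) AEAD requires the
 * PMULL instruction and is only registered when it is available.
 */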
static int __init ghash_ce_mod_init(void)
{
        int ret;

        if (!(elf_hwcap & HWCAP_ASIMD))
                return -ENODEV;

        if (elf_hwcap & HWCAP_PMULL)
                pmull_ghash_update = pmull_ghash_update_p64;

        else
                pmull_ghash_update = pmull_ghash_update_p8;

        ret = crypto_register_shash(&ghash_alg);
        if (ret)
                return ret;

        if (elf_hwcap & HWCAP_PMULL) {
                ret = crypto_register_aead(&gcm_aes_alg);
                if (ret)
                        crypto_unregister_shash(&ghash_alg);
        }
        return ret;
}

static void __exit ghash_ce_mod_exit(void)
{
        crypto_unregister_shash(&ghash_alg);
        crypto_unregister_aead(&gcm_aes_alg);
}

static const struct cpu_feature ghash_cpu_feature[] = {
        { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, ghash_cpu_feature);

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);