linux/arch/arm64/crypto/ghash-ce-glue.c
/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE        16
#define GHASH_DIGEST_SIZE       16
#define GCM_IV_SIZE             12

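/*
 * 'a' and 'b' hold the hash key H premultiplied by 'x', the form the
 * PMULL assembler code consumes (see __ghash_setkey()); 'k' keeps the
 * raw key for the non-SIMD gf128mul fallback.
 */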
struct ghash_key {
        u64 a;
        u64 b;
        be128 k;
};

struct ghash_desc_ctx {
        u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
        u8 buf[GHASH_BLOCK_SIZE];
        u32 count;
};

struct gcm_aes_ctx {
        struct crypto_aes_ctx   aes_key;
        struct ghash_key        ghash_key;
};

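/*
 * Two assembler implementations are provided: the _p64 variant uses the
 * 64x64->128 bit PMULL instruction from the ARMv8 Crypto Extensions,
 * while the _p8 variant is built from the 8-bit polynomial multiplies
 * that every ARMv8 NEON implementation supports.
 */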
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
                                       struct ghash_key const *k,
                                       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
                                      struct ghash_key const *k,
                                      const char *head);

static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
                                  struct ghash_key const *k,
                                  const char *head);

asmlinkage void pmull_gcm_encrypt(int blocks, u64 dg[], u8 dst[],
                                  const u8 src[], struct ghash_key const *k,
                                  u8 ctr[], u32 const rk[], int rounds,
                                  u8 ks[]);

asmlinkage void pmull_gcm_decrypt(int blocks, u64 dg[], u8 dst[],
                                  const u8 src[], struct ghash_key const *k,
                                  u8 ctr[], u32 const rk[], int rounds);

asmlinkage void pmull_gcm_encrypt_block(u8 dst[], u8 const src[],
                                        u32 const rk[], int rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ghash_init(struct shash_desc *desc)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

        *ctx = (struct ghash_desc_ctx){};
        return 0;
}

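/*
 * Fold 'blocks' full blocks from 'src' into the hash state dg[],
 * optionally preceded by one block at 'head' (the buffered remainder of
 * an earlier call). The PMULL path runs with the NEON unit enabled; when
 * SIMD may not be used (e.g. in interrupt context), fall back to the
 * generic gf128mul_lle() multiply. dg[] holds the state as two
 * native-endian words, dg[0] being the low half.
 */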
static void ghash_do_update(int blocks, u64 dg[], const char *src,
                            struct ghash_key *key, const char *head)
{
        if (likely(may_use_simd())) {
                kernel_neon_begin();
                pmull_ghash_update(blocks, dg, src, key, head);
                kernel_neon_end();
        } else {
                be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

                do {
                        const u8 *in = src;

                        if (head) {
                                in = head;
                                blocks++;
                                head = NULL;
                        } else {
                                src += GHASH_BLOCK_SIZE;
                        }

                        crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
                        gf128mul_lle(&dst, &key->k);
                } while (--blocks);

                dg[0] = be64_to_cpu(dst.b);
                dg[1] = be64_to_cpu(dst.a);
        }
}

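/*
 * Buffer partial blocks in ctx->buf: complete a previously buffered
 * partial block first, hash all full blocks in one call, and stash any
 * remainder for the next update or for ghash_final().
 */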
static int ghash_update(struct shash_desc *desc, const u8 *src,
                        unsigned int len)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

        ctx->count += len;

        if ((partial + len) >= GHASH_BLOCK_SIZE) {
                struct ghash_key *key = crypto_shash_ctx(desc->tfm);
                int blocks;

                if (partial) {
                        int p = GHASH_BLOCK_SIZE - partial;

                        memcpy(ctx->buf + partial, src, p);
                        src += p;
                        len -= p;
                }

                blocks = len / GHASH_BLOCK_SIZE;
                len %= GHASH_BLOCK_SIZE;

                ghash_do_update(blocks, ctx->digest, src, key,
                                partial ? ctx->buf : NULL);

                src += blocks * GHASH_BLOCK_SIZE;
                partial = 0;
        }
        if (len)
                memcpy(ctx->buf + partial, src, len);
        return 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
        struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
        unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

        if (partial) {
                struct ghash_key *key = crypto_shash_ctx(desc->tfm);

                memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

                ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
        }
        put_unaligned_be64(ctx->digest[1], dst);
        put_unaligned_be64(ctx->digest[0], dst + 8);

        *ctx = (struct ghash_desc_ctx){};
        return 0;
}

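/*
 * GHASH operates in GF(2^128) with the field polynomial
 * x^128 + x^7 + x^2 + x + 1. The hash key H is premultiplied by 'x'
 * here, with the 0xc2...00 constant (a reflected encoding of the
 * reduction polynomial) folded in when a bit shifts out, which lets the
 * assembler code use a cheaper modular reduction after each multiply.
 */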
static int __ghash_setkey(struct ghash_key *key,
                          const u8 *inkey, unsigned int keylen)
{
        u64 a, b;

        /* needed for the fallback */
        memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);

        /* perform multiplication by 'x' in GF(2^128) */
        b = get_unaligned_be64(inkey);
        a = get_unaligned_be64(inkey + 8);

        key->a = (a << 1) | (b >> 63);
        key->b = (b << 1) | (a >> 63);

        if (b >> 63)
                key->b ^= 0xc200000000000000UL;

        return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
                        const u8 *inkey, unsigned int keylen)
{
        struct ghash_key *key = crypto_shash_ctx(tfm);

        if (keylen != GHASH_BLOCK_SIZE) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        return __ghash_setkey(key, inkey, keylen);
}

static struct shash_alg ghash_alg = {
        .base.cra_name          = "ghash",
        .base.cra_driver_name   = "ghash-ce",
        .base.cra_priority      = 200,
        .base.cra_flags         = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize     = GHASH_BLOCK_SIZE,
        .base.cra_ctxsize       = sizeof(struct ghash_key),
        .base.cra_module        = THIS_MODULE,

        .digestsize             = GHASH_DIGEST_SIZE,
        .init                   = ghash_init,
        .update                 = ghash_update,
        .final                  = ghash_final,
        .setkey                 = ghash_setkey,
        .descsize               = sizeof(struct ghash_desc_ctx),
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}

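/*
 * As specified by GCM, the GHASH key H is the encryption of the all-zero
 * block under the AES key, so derive it by running the scalar AES core
 * over a zeroed compound literal.
 */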
static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
                      unsigned int keylen)
{
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
        u8 key[GHASH_BLOCK_SIZE];
        int ret;

        ret = crypto_aes_expand_key(&ctx->aes_key, inkey, keylen);
        if (ret) {
                tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        __aes_arm64_encrypt(ctx->aes_key.key_enc, key, (u8[AES_BLOCK_SIZE]){},
                            num_rounds(&ctx->aes_key));

        return __ghash_setkey(&ctx->ghash_key, key, sizeof(key));
}

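/* GCM allows 12..16 byte tags, plus the truncated 8 and 4 byte sizes */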
static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 8:
        case 12 ... 16:
                break;
        default:
                return -EINVAL;
        }
        return 0;
}

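/*
 * Fold 'count' bytes of associated data into dg[]. The data may arrive
 * in arbitrarily sized scatterlist chunks, so buf[] and *buf_count carry
 * a partial block from one call to the next, mirroring what
 * ghash_update() does for the shash interface.
 */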
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
                           int *buf_count, struct gcm_aes_ctx *ctx)
{
        if (*buf_count > 0) {
                int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

                memcpy(&buf[*buf_count], src, buf_added);

                *buf_count += buf_added;
                src += buf_added;
                count -= buf_added;
        }

        if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
                int blocks = count / GHASH_BLOCK_SIZE;

                ghash_do_update(blocks, dg, src, &ctx->ghash_key,
                                *buf_count ? buf : NULL);

                src += blocks * GHASH_BLOCK_SIZE;
                count %= GHASH_BLOCK_SIZE;
                *buf_count = 0;
        }

        if (count > 0) {
                memcpy(buf, src, count);
                *buf_count = count;
        }
}

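/*
 * Hash the entire associated data area into dg[], zero-padding any final
 * partial block as the GCM spec requires.
 */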
static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        u8 buf[GHASH_BLOCK_SIZE];
        struct scatter_walk walk;
        u32 len = req->assoclen;
        int buf_count = 0;

        scatterwalk_start(&walk, req->src);

        do {
                u32 n = scatterwalk_clamp(&walk, len);
                u8 *p;

                if (!n) {
                        scatterwalk_start(&walk, sg_next(walk.sg));
                        n = scatterwalk_clamp(&walk, len);
                }
                p = scatterwalk_map(&walk);

                gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
                len -= n;

                scatterwalk_unmap(p);
                scatterwalk_advance(&walk, n);
                scatterwalk_done(&walk, 0, len);
        } while (len);

        if (buf_count) {
                memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
                ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);
        }
}

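/*
 * Complete the MAC: hash in the length block len(A) || len(C) (both in
 * bits as big-endian 64-bit values), then XOR the result into tag[],
 * which already holds the encrypted initial counter block E_K(J0).
 */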
static void gcm_final(struct aead_request *req, struct gcm_aes_ctx *ctx,
                      u64 dg[], u8 tag[], int cryptlen)
{
        u8 mac[AES_BLOCK_SIZE];
        u128 lengths;

        lengths.a = cpu_to_be64(req->assoclen * 8);
        lengths.b = cpu_to_be64(cryptlen * 8);

        ghash_do_update(1, dg, (void *)&lengths, &ctx->ghash_key, NULL);

        put_unaligned_be64(dg[1], mac);
        put_unaligned_be64(dg[0], mac + 8);

        crypto_xor(tag, mac, AES_BLOCK_SIZE);
}

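/*
 * Per the GCM spec, the 12 byte IV and a 32-bit big-endian counter
 * starting at 1 form the initial counter block J0; its encryption (tag[])
 * masks the final authentication tag, and the payload keystream starts
 * at counter value 2. In the SIMD path, ks[] is primed with the
 * counter-2 keystream block and, judging by the tail handling below, the
 * assembler routine keeps it one block ahead so the final partial block
 * can be XORed without another AES call; the scalar fallback instead
 * regenerates the tail keystream explicitly after its bulk loop.
 */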
static int gcm_encrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        struct skcipher_walk walk;
        u8 iv[AES_BLOCK_SIZE];
        u8 ks[AES_BLOCK_SIZE];
        u8 tag[AES_BLOCK_SIZE];
        u64 dg[2] = {};
        int err;

        if (req->assoclen)
                gcm_calculate_auth_mac(req, dg);

        memcpy(iv, req->iv, GCM_IV_SIZE);
        put_unaligned_be32(1, iv + GCM_IV_SIZE);

        if (likely(may_use_simd())) {
                kernel_neon_begin();

                pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc,
                                        num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);
                pmull_gcm_encrypt_block(ks, iv, NULL,
                                        num_rounds(&ctx->aes_key));
                put_unaligned_be32(3, iv + GCM_IV_SIZE);
                kernel_neon_end();

                err = skcipher_walk_aead_encrypt(&walk, req, false);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;

                        kernel_neon_begin();
                        pmull_gcm_encrypt(blocks, dg, walk.dst.virt.addr,
                                          walk.src.virt.addr, &ctx->ghash_key,
                                          iv, ctx->aes_key.key_enc,
                                          num_rounds(&ctx->aes_key), ks);
                        kernel_neon_end();

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
        } else {
                __aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv,
                                    num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);

                err = skcipher_walk_aead_encrypt(&walk, req, false);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;
                        u8 *dst = walk.dst.virt.addr;
                        u8 *src = walk.src.virt.addr;

                        do {
                                __aes_arm64_encrypt(ctx->aes_key.key_enc,
                                                    ks, iv,
                                                    num_rounds(&ctx->aes_key));
                                crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE);
                                crypto_inc(iv, AES_BLOCK_SIZE);

                                dst += AES_BLOCK_SIZE;
                                src += AES_BLOCK_SIZE;
                        } while (--blocks > 0);

                        ghash_do_update(walk.nbytes / AES_BLOCK_SIZE, dg,
                                        walk.dst.virt.addr, &ctx->ghash_key,
                                        NULL);

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                if (walk.nbytes)
                        __aes_arm64_encrypt(ctx->aes_key.key_enc, ks, iv,
                                            num_rounds(&ctx->aes_key));
        }

        /* handle the tail */
        if (walk.nbytes) {
                u8 buf[GHASH_BLOCK_SIZE];

                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, ks,
                               walk.nbytes);

                memcpy(buf, walk.dst.virt.addr, walk.nbytes);
                memset(buf + walk.nbytes, 0, GHASH_BLOCK_SIZE - walk.nbytes);
                ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);

                err = skcipher_walk_done(&walk, 0);
        }

        if (err)
                return err;

        gcm_final(req, ctx, dg, tag, req->cryptlen);

        /* copy authtag to end of dst */
        scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
                                 crypto_aead_authsize(aead), 1);

        return 0;
}

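/*
 * Decryption mirrors gcm_encrypt(), except that the ciphertext must be
 * fed to GHASH before it is decrypted, and the computed tag is compared
 * against the trailing authsize bytes of the source using
 * crypto_memneq() to avoid leaking timing information.
 */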
static int gcm_decrypt(struct aead_request *req)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
        unsigned int authsize = crypto_aead_authsize(aead);
        struct skcipher_walk walk;
        u8 iv[AES_BLOCK_SIZE];
        u8 tag[AES_BLOCK_SIZE];
        u8 buf[GHASH_BLOCK_SIZE];
        u64 dg[2] = {};
        int err;

        if (req->assoclen)
                gcm_calculate_auth_mac(req, dg);

        memcpy(iv, req->iv, GCM_IV_SIZE);
        put_unaligned_be32(1, iv + GCM_IV_SIZE);

        if (likely(may_use_simd())) {
                kernel_neon_begin();

                pmull_gcm_encrypt_block(tag, iv, ctx->aes_key.key_enc,
                                        num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);
                kernel_neon_end();

                err = skcipher_walk_aead_decrypt(&walk, req, false);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;

                        kernel_neon_begin();
                        pmull_gcm_decrypt(blocks, dg, walk.dst.virt.addr,
                                          walk.src.virt.addr, &ctx->ghash_key,
                                          iv, ctx->aes_key.key_enc,
                                          num_rounds(&ctx->aes_key));
                        kernel_neon_end();

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                if (walk.nbytes) {
                        kernel_neon_begin();
                        pmull_gcm_encrypt_block(iv, iv, ctx->aes_key.key_enc,
                                                num_rounds(&ctx->aes_key));
                        kernel_neon_end();
                }
        } else {
                __aes_arm64_encrypt(ctx->aes_key.key_enc, tag, iv,
                                    num_rounds(&ctx->aes_key));
                put_unaligned_be32(2, iv + GCM_IV_SIZE);

                err = skcipher_walk_aead_decrypt(&walk, req, false);

                while (walk.nbytes >= AES_BLOCK_SIZE) {
                        int blocks = walk.nbytes / AES_BLOCK_SIZE;
                        u8 *dst = walk.dst.virt.addr;
                        u8 *src = walk.src.virt.addr;

                        ghash_do_update(blocks, dg, walk.src.virt.addr,
                                        &ctx->ghash_key, NULL);

                        do {
                                __aes_arm64_encrypt(ctx->aes_key.key_enc,
                                                    buf, iv,
                                                    num_rounds(&ctx->aes_key));
                                crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
                                crypto_inc(iv, AES_BLOCK_SIZE);

                                dst += AES_BLOCK_SIZE;
                                src += AES_BLOCK_SIZE;
                        } while (--blocks > 0);

                        err = skcipher_walk_done(&walk,
                                                 walk.nbytes % AES_BLOCK_SIZE);
                }
                if (walk.nbytes)
                        __aes_arm64_encrypt(ctx->aes_key.key_enc, iv, iv,
                                            num_rounds(&ctx->aes_key));
        }

        /* handle the tail */
        if (walk.nbytes) {
                memcpy(buf, walk.src.virt.addr, walk.nbytes);
                memset(buf + walk.nbytes, 0, GHASH_BLOCK_SIZE - walk.nbytes);
                ghash_do_update(1, dg, buf, &ctx->ghash_key, NULL);

                crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv,
                               walk.nbytes);

                err = skcipher_walk_done(&walk, 0);
        }

        if (err)
                return err;

        gcm_final(req, ctx, dg, tag, req->cryptlen - authsize);

        /* compare calculated auth tag with the stored one */
        scatterwalk_map_and_copy(buf, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);

        if (crypto_memneq(tag, buf, authsize))
                return -EBADMSG;
        return 0;
}

static struct aead_alg gcm_aes_alg = {
        .ivsize                 = GCM_IV_SIZE,
        .chunksize              = AES_BLOCK_SIZE,
        .maxauthsize            = AES_BLOCK_SIZE,
        .setkey                 = gcm_setkey,
        .setauthsize            = gcm_setauthsize,
        .encrypt                = gcm_encrypt,
        .decrypt                = gcm_decrypt,

        .base.cra_name          = "gcm(aes)",
        .base.cra_driver_name   = "gcm-aes-ce",
        .base.cra_priority      = 300,
        .base.cra_blocksize     = 1,
        .base.cra_ctxsize       = sizeof(struct gcm_aes_ctx),
        .base.cra_module        = THIS_MODULE,
};

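/*
 * GHASH can always be provided, using the 64-bit PMULL instruction when
 * the CPU has it and the 8-bit polynomial fallback otherwise. The GCM
 * code relies on the 64-bit form, so the AEAD is only registered when
 * HWCAP_PMULL is set.
 */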
static int __init ghash_ce_mod_init(void)
{
        int ret;

        if (!(elf_hwcap & HWCAP_ASIMD))
                return -ENODEV;

        if (elf_hwcap & HWCAP_PMULL)
                pmull_ghash_update = pmull_ghash_update_p64;
        else
                pmull_ghash_update = pmull_ghash_update_p8;

        ret = crypto_register_shash(&ghash_alg);
        if (ret)
                return ret;

        if (elf_hwcap & HWCAP_PMULL) {
                ret = crypto_register_aead(&gcm_aes_alg);
                if (ret)
                        crypto_unregister_shash(&ghash_alg);
        }
        return ret;
}

static void __exit ghash_ce_mod_exit(void)
{
        /* the AEAD is only registered when PMULL is available; mirror that */
        if (elf_hwcap & HWCAP_PMULL)
                crypto_unregister_aead(&gcm_aes_alg);
        crypto_unregister_shash(&ghash_alg);
}

static const struct cpu_feature ghash_cpu_feature[] = {
        { cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, ghash_cpu_feature);

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);