linux/arch/x86/crypto/ghash-clmulni-intel_glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

#define GHASH_BLOCK_SIZE        16
#define GHASH_DIGEST_SIZE       16

/* Assembly routines, implemented in ghash-clmulni-intel_asm.S */
void clmul_ghash_mul(char *dst, const u128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
                        const u128 *shash);

struct ghash_async_ctx {
        struct cryptd_ahash *cryptd_tfm;
};

struct ghash_ctx {
        u128 shash;
};

struct ghash_desc_ctx {
        u8 buffer[GHASH_BLOCK_SIZE];
        u32 bytes;      /* bytes still needed to fill buffer; 0 if no partial block is pending */
};

static int ghash_init(struct shash_desc *desc)
{
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

        memset(dctx, 0, sizeof(*dctx));

        return 0;
}

static int ghash_setkey(struct crypto_shash *tfm,
                        const u8 *key, unsigned int keylen)
{
        struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
        be128 *x = (be128 *)key;
        u64 a, b;

        if (keylen != GHASH_BLOCK_SIZE) {
                crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /*
         * Perform multiplication by 'x' in GF(2^128): shift the 128-bit
         * key left by one bit and, when a bit falls off the top, reduce
         * modulo the GHASH polynomial x^128 + x^7 + x^2 + x + 1, which
         * the 0xc2 constant below encodes in this representation.
         */
        a = be64_to_cpu(x->a);
        b = be64_to_cpu(x->b);

        ctx->shash.a = (b << 1) | (a >> 63);
        ctx->shash.b = (a << 1) | (b >> 63);

        if (a >> 63)
                ctx->shash.b ^= ((u64)0xc2) << 56;

        return 0;
}

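/*
 * Illustration only, not part of the driver: the key schedule above,
 * restated as a standalone helper so the shift-and-reduce step can be
 * read in isolation. The names ghash_shifted_key and ghash_shift_key
 * are hypothetical; the body mirrors ghash_setkey() line for line,
 * including the a/b word swap into the layout the assembly consumes.
 */
#if 0
struct ghash_shifted_key { u64 a, b; };

static struct ghash_shifted_key ghash_shift_key(u64 hi, u64 lo)
{
        struct ghash_shifted_key k;

        k.a = (lo << 1) | (hi >> 63);           /* as ctx->shash.a */
        k.b = (hi << 1) | (lo >> 63);           /* as ctx->shash.b */
        if (hi >> 63)                           /* bit shifted out: reduce */
                k.b ^= (u64)0xc2 << 56;
        return k;
}
#endif
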
static int ghash_update(struct shash_desc *desc,
                         const u8 *src, unsigned int srclen)
{
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
        u8 *dst = dctx->buffer;

        kernel_fpu_begin();
        if (dctx->bytes) {
                int n = min(srclen, dctx->bytes);
                u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

                dctx->bytes -= n;
                srclen -= n;

                while (n--)
                        *pos++ ^= *src++;

                if (!dctx->bytes)
                        clmul_ghash_mul(dst, &ctx->shash);
        }

        clmul_ghash_update(dst, src, srclen, &ctx->shash);
        kernel_fpu_end();

        if (srclen & 0xf) {
                src += srclen - (srclen & 0xf);
                srclen &= 0xf;
                dctx->bytes = GHASH_BLOCK_SIZE - srclen;
                while (srclen--)
                        *dst++ ^= *src++;
        }

        return 0;
}

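/*
 * Worked example of the buffering above (illustrative): a 20-byte update
 * starting from an empty buffer hashes one full 16-byte block in
 * clmul_ghash_update(), then XORs the 4 trailing bytes into dctx->buffer
 * and sets dctx->bytes = 12, i.e. 12 more bytes are needed before the
 * buffered block is folded in with clmul_ghash_mul().
 */
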
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
        u8 *dst = dctx->buffer;

        if (dctx->bytes) {
                u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

                /* zero-pad the partial block (XOR with 0 leaves it as-is) */
                while (dctx->bytes--)
                        *tmp++ ^= 0;

                kernel_fpu_begin();
                clmul_ghash_mul(dst, &ctx->shash);
                kernel_fpu_end();
        }

        dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
        struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
        u8 *buf = dctx->buffer;

        ghash_flush(ctx, dctx);
        memcpy(dst, buf, GHASH_BLOCK_SIZE);

        return 0;
}

static struct shash_alg ghash_alg = {
        .digestsize     = GHASH_DIGEST_SIZE,
        .init           = ghash_init,
        .update         = ghash_update,
        .final          = ghash_final,
        .setkey         = ghash_setkey,
        .descsize       = sizeof(struct ghash_desc_ctx),
        .base           = {
                .cra_name               = "__ghash",
                .cra_driver_name        = "__ghash-pclmulqdqni",
                .cra_priority           = 0,
                .cra_flags              = CRYPTO_ALG_INTERNAL,
                .cra_blocksize          = GHASH_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct ghash_ctx),
                .cra_module             = THIS_MODULE,
        },
};

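/*
 * The shash above is marked CRYPTO_ALG_INTERNAL, so it is never handed
 * out to users directly: it may only run where the FPU is usable. The
 * ahash glue below exposes the public "ghash" algorithm, executing the
 * internal shash inline when SIMD is usable and bouncing the request to
 * a cryptd worker thread otherwise.
 */
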
static int ghash_async_init(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
        struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
        struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

        desc->tfm = child;
        return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        /*
         * Defer to cryptd when the FPU is unusable, or when requests are
         * already queued there and jumping the queue would reorder them.
         */
        if (!crypto_simd_usable() ||
            (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_update(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                return shash_ahash_update(req, desc);
        }
}

static int ghash_async_final(struct ahash_request *req)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!crypto_simd_usable() ||
            (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_final(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                return crypto_shash_final(desc, req->result);
        }
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

        ghash_async_init(req);
        memcpy(dctx, in, sizeof(*dctx));
        return 0;
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
        struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

        memcpy(out, dctx, sizeof(*dctx));
        return 0;
}

static int ghash_async_digest(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

        if (!crypto_simd_usable() ||
            (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_digest(cryptd_req);
        } else {
                struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
                struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

                desc->tfm = child;
                return shash_ahash_digest(req, desc);
        }
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct crypto_ahash *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
                               & CRYPTO_TFM_REQ_MASK);
        err = crypto_ahash_setkey(child, key, keylen);
        crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
                               & CRYPTO_TFM_RES_MASK);

        return err;
}

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
        struct cryptd_ahash *cryptd_tfm;
        struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
                                        CRYPTO_ALG_INTERNAL,
                                        CRYPTO_ALG_INTERNAL);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ctx->cryptd_tfm = cryptd_tfm;
        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct ahash_request) +
                                 crypto_ahash_reqsize(&cryptd_tfm->base));

        return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
        struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
        .init           = ghash_async_init,
        .update         = ghash_async_update,
        .final          = ghash_async_final,
        .setkey         = ghash_async_setkey,
        .digest         = ghash_async_digest,
        .export         = ghash_async_export,
        .import         = ghash_async_import,
        .halg = {
                .digestsize     = GHASH_DIGEST_SIZE,
                .statesize      = sizeof(struct ghash_desc_ctx),
                .base = {
                        .cra_name               = "ghash",
                        .cra_driver_name        = "ghash-clmulni",
                        .cra_priority           = 400,
                        .cra_ctxsize            = sizeof(struct ghash_async_ctx),
                        .cra_flags              = CRYPTO_ALG_ASYNC,
                        .cra_blocksize          = GHASH_BLOCK_SIZE,
                        .cra_module             = THIS_MODULE,
                        .cra_init               = ghash_async_init_tfm,
                        .cra_exit               = ghash_async_exit_tfm,
                },
        },
};

static const struct x86_cpu_id pcmul_cpu_id[] = {
        X86_FEATURE_MATCH(X86_FEATURE_PCLMULQDQ), /* Pickle-Mickle-Duck */
        {}
};
MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);

static int __init ghash_pclmulqdqni_mod_init(void)
{
        int err;

        if (!x86_match_cpu(pcmul_cpu_id))
                return -ENODEV;

        err = crypto_register_shash(&ghash_alg);
        if (err)
                goto err_out;
        err = crypto_register_ahash(&ghash_async_alg);
        if (err)
                goto err_shash;

        return 0;

err_shash:
        crypto_unregister_shash(&ghash_alg);
err_out:
        return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
        crypto_unregister_ahash(&ghash_async_alg);
        crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH hash function, accelerated by PCLMULQDQ-NI");
MODULE_ALIAS_CRYPTO("ghash");

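/*
 * Usage sketch (illustration only; ghash_clmulni_example() is a
 * hypothetical caller, not part of this file): computing a GHASH digest
 * through the generic crypto API, which selects this driver when its
 * priority (400) wins. crypto_ahash_digest() is expected to complete
 * synchronously on the inline path; if the request is bounced to cryptd
 * it returns -EINPROGRESS and a real caller would need a completion
 * callback, which is elided here. 'data' must be addressable by
 * scatterlist (i.e. not on the stack).
 */
#if 0
#include <crypto/hash.h>
#include <linux/scatterlist.h>

static int ghash_clmulni_example(const u8 key[GHASH_BLOCK_SIZE],
                                 const u8 *data, unsigned int len,
                                 u8 out[GHASH_DIGEST_SIZE])
{
        struct crypto_ahash *tfm;
        struct ahash_request *req;
        struct scatterlist sg;
        int err;

        tfm = crypto_alloc_ahash("ghash", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_ahash_setkey(tfm, key, GHASH_BLOCK_SIZE);
        if (err)
                goto out_free_tfm;

        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                err = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, data, len);
        ahash_request_set_callback(req, 0, NULL, NULL);
        ahash_request_set_crypt(req, &sg, out, len);

        err = crypto_ahash_digest(req);

        ahash_request_free(req);
out_free_tfm:
        crypto_free_ahash(tfm);
        return err;
}
#endif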