linux/drivers/crypto/vmx/aes_ctr.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

#include "aesp8-ppc.h"

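/*
 * Per-tfm context: the expanded key schedule used by the P8 assembly
 * routines plus a software ctr(aes) fallback that is used whenever the
 * vector unit cannot be used.
 */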
struct p8_aes_ctr_ctx {
        struct crypto_skcipher *fallback;
        struct aes_key enc_key;
};

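/*
 * Allocate the ctr(aes) fallback and reserve enough request context
 * space to carry a subrequest for it.
 */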
static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
{
        struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *fallback;

        fallback = crypto_alloc_skcipher("ctr(aes)", 0,
                                         CRYPTO_ALG_NEED_FALLBACK |
                                         CRYPTO_ALG_ASYNC);
        if (IS_ERR(fallback)) {
                pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
                       PTR_ERR(fallback));
                return PTR_ERR(fallback);
        }

        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                                    crypto_skcipher_reqsize(fallback));
        ctx->fallback = fallback;
        return 0;
}

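/* Release the fallback transform allocated in p8_aes_ctr_init(). */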
static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
{
        struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(ctx->fallback);
}

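/*
 * Expand the key for the P8 routines and program the same key into the
 * fallback.  In-kernel VSX use is bracketed by preempt/pagefault disable
 * and enable_kernel_vsx()/disable_kernel_vsx().
 */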
static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        int ret;

        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

        ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

        return ret ? -EINVAL : 0;
}

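/*
 * Handle the final partial block: encrypt the current counter block to
 * produce a keystream block, XOR the remaining bytes into the
 * destination and advance the counter.
 */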
static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
                             struct skcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        preempt_disable();
        pagefault_disable();
        enable_kernel_vsx();
        aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
        disable_kernel_vsx();
        pagefault_enable();
        preempt_enable();

        crypto_xor_cpy(dst, keystream, src, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

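/*
 * CTR encryption and decryption are the same operation.  Full blocks
 * are processed by the VSX assembly; a trailing partial block, if any,
 * is handled by p8_aes_ctr_final().
 */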
static int p8_aes_ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int ret;

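        /*
         * The vector unit cannot be used here (e.g. in interrupt
         * context), so forward the request to the fallback through the
         * subrequest stored in the request context.
         */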
        if (!crypto_simd_usable()) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                *subreq = *req;
                skcipher_request_set_tfm(subreq, ctx->fallback);
                return crypto_skcipher_encrypt(subreq);
        }

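        /*
         * Walk the scatterlists and process as many full blocks per
         * step as possible with the VSX routine.
         */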
        ret = skcipher_walk_virt(&walk, req, false);
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                preempt_disable();
                pagefault_disable();
                enable_kernel_vsx();
                aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
                                            walk.dst.virt.addr,
                                            nbytes / AES_BLOCK_SIZE,
                                            &ctx->enc_key, walk.iv);
                disable_kernel_vsx();
                pagefault_enable();
                preempt_enable();

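                /*
                 * Advance the counter once for each full block
                 * processed above.
                 */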
                do {
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                } while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);

                ret = skcipher_walk_done(&walk, nbytes);
        }
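        /*
         * Fewer than AES_BLOCK_SIZE bytes remain; finish them with a
         * single keystream block.
         */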
        if (nbytes) {
                p8_aes_ctr_final(ctx, &walk);
                ret = skcipher_walk_done(&walk, 0);
        }
        return ret;
}

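/*
 * CTR turns AES into a stream cipher, hence cra_blocksize = 1; chunksize
 * advertises the underlying AES block granularity.  The high priority
 * makes this implementation preferred over lower-priority ctr(aes)
 * providers such as the generic one.
 */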
struct skcipher_alg p8_aes_ctr_alg = {
        .base.cra_name = "ctr(aes)",
        .base.cra_driver_name = "p8_aes_ctr",
        .base.cra_module = THIS_MODULE,
        .base.cra_priority = 2000,
        .base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
        .base.cra_blocksize = 1,
        .base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
        .setkey = p8_aes_ctr_setkey,
        .encrypt = p8_aes_ctr_crypt,
        .decrypt = p8_aes_ctr_crypt,
        .init = p8_aes_ctr_init,
        .exit = p8_aes_ctr_exit,
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .chunksize = AES_BLOCK_SIZE,
};