linux/drivers/crypto/vmx/aes_xts.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>

#include "aesp8-ppc.h"

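/*
 * Per-transform context: VSX-expanded AES keys for the data and tweak halves
 * of the XTS key, plus a generic xts(aes) fallback transform.
 */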
struct p8_aes_xts_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};

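/*
 * Allocate the generic xts(aes) fallback and size the request context so a
 * fallback sub-request can be placed inside the original request.
 */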
static int p8_aes_xts_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("xts(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate xts(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_xts_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

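/*
 * Expand both halves of the XTS key under VSX: the upper half keys the tweak
 * cipher, the lower half keys data encryption and decryption. The full key is
 * also installed into the fallback transform.
 */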
static int p8_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, key, keylen);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

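/*
 * Encrypt or decrypt one request. The VSX unit is claimed with preemption
 * and page faults disabled around every aesp8-ppc call.
 */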
static int p8_aes_xts_crypt(struct skcipher_request *req, int enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	u8 tweak[AES_BLOCK_SIZE];
	int ret;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

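	/*
	 * Use the fallback when SIMD registers cannot be used in this
	 * context or when the length is not a whole number of blocks
	 * (the VSX routines below only handle full AES blocks).
	 */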
	if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return enc ? crypto_skcipher_encrypt(subreq) :
			     crypto_skcipher_decrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();

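	/* Encrypt the IV with the tweak key to form the initial XTS tweak. */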
	aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);

	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

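	/*
	 * Process whole AES blocks from each chunk of the walk; any
	 * remainder is returned to skcipher_walk_done() and revisited in
	 * the next iteration.
	 */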
	while ((nbytes = walk.nbytes) != 0) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		if (enc)
			aes_p8_xts_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   round_down(nbytes, AES_BLOCK_SIZE),
					   &ctx->enc_key, NULL, tweak);
		else
			aes_p8_xts_decrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   round_down(nbytes, AES_BLOCK_SIZE),
					   &ctx->dec_key, NULL, tweak);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	return ret;
}

static int p8_aes_xts_encrypt(struct skcipher_request *req)
{
	return p8_aes_xts_crypt(req, 1);
}

static int p8_aes_xts_decrypt(struct skcipher_request *req)
{
	return p8_aes_xts_crypt(req, 0);
}

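/*
 * Algorithm registration; CRYPTO_ALG_NEED_FALLBACK pairs with the fallback
 * transform allocated in p8_aes_xts_init().
 */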
struct skcipher_alg p8_aes_xts_alg = {
	.base.cra_name = "xts(aes)",
	.base.cra_driver_name = "p8_aes_xts",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = AES_BLOCK_SIZE,
	.base.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.setkey = p8_aes_xts_setkey,
	.encrypt = p8_aes_xts_encrypt,
	.decrypt = p8_aes_xts_decrypt,
	.init = p8_aes_xts_init,
	.exit = p8_aes_xts_exit,
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
};