linux/arch/sparc/crypto/camellia_glue.c
// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

#define CAMELLIA_MIN_KEY_SIZE        16
#define CAMELLIA_MAX_KEY_SIZE        32
#define CAMELLIA_BLOCK_SIZE          16
#define CAMELLIA_TABLE_BYTE_LEN     272

struct camellia_sparc64_ctx {
        u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        int key_len;
};

extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
                                        unsigned int key_len, u64 *decrypt_key);

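/*
 * Key setup: reject anything other than a 128-, 192- or 256-bit key,
 * then let the sparc64 assembly expand both the encryption and the
 * decryption key schedules in a single pass.
 */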
static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
                            unsigned int key_len)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u32 *in_key = (const u32 *) _in_key;
        u32 *flags = &tfm->crt_flags;

        if (key_len != 16 && key_len != 24 && key_len != 32) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        ctx->key_len = key_len;

        camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
                                    key_len, &ctx->decrypt_key[0]);
        return 0;
}

extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
                                   u32 *output, unsigned int key_len);

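/*
 * Single-block paths for the bare "camellia" cipher: both call the
 * same assembly routine and differ only in which expanded key
 * schedule is passed in.
 */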
static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->encrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->decrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

#define CAMELLIA_BLOCK_MASK     (~(CAMELLIA_BLOCK_SIZE - 1))

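/*
 * Shared ECB walker: 128-bit keys use the 3-grand-round assembly
 * routine, 192/256-bit keys the 4-grand-round one.  The expanded key
 * is loaded once up front via camellia_sparc64_load_keys(), each
 * scatterlist segment is then processed in whole blocks, and
 * fprs_write(0) clears %fprs at the end so no dirty FPU state is
 * left behind.
 */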
static int __ecb_crypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes, bool encrypt)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        ecb_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_ecb_crypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_ecb_crypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        if (encrypt)
                key = &ctx->encrypt_key[0];
        else
                key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, true);
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, false);
}

typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

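/*
 * CBC walkers: same structure as the ECB path, but the assembly
 * routines also take a pointer to the IV so chaining continues
 * across scatterlist segments.  Encryption uses the encryption key
 * schedule, decryption the decryption schedule.
 */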
static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        key = &ctx->encrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key,
                           (u64 *) walk.iv);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key,
                           (u64 *) walk.iv);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

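/*
 * Three algorithm instances are registered: the bare single-block
 * cipher plus ECB and CBC blkcipher modes, all at
 * SPARC_CR_OPCODE_PRIORITY so they take precedence over the generic
 * C implementations.
 */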
static struct crypto_alg algs[] = { {
        .cra_name               = "camellia",
        .cra_driver_name        = "camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = CAMELLIA_MIN_KEY_SIZE,
                        .cia_max_keysize        = CAMELLIA_MAX_KEY_SIZE,
                        .cia_setkey             = camellia_set_key,
                        .cia_encrypt            = camellia_encrypt,
                        .cia_decrypt            = camellia_decrypt
                }
        }
}, {
        .cra_name               = "ecb(camellia)",
        .cra_driver_name        = "ecb-camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAMELLIA_MIN_KEY_SIZE,
                        .max_keysize    = CAMELLIA_MAX_KEY_SIZE,
                        .setkey         = camellia_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(camellia)",
        .cra_driver_name        = "cbc-camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAMELLIA_MIN_KEY_SIZE,
                        .max_keysize    = CAMELLIA_MAX_KEY_SIZE,
                        .ivsize         = CAMELLIA_BLOCK_SIZE,
                        .setkey         = camellia_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}
};

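/*
 * The camellia opcodes are only usable when the CPU advertises the
 * crypto hwcap and the Compatibility Feature Register (%asr26) has
 * its CAMELLIA bit set; otherwise the module refuses to load.
 */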
static bool __init sparc64_has_camellia_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_CAMELLIA))
                return false;

        return true;
}

static int __init camellia_sparc64_mod_init(void)
{
        if (sparc64_has_camellia_opcode()) {
                pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
                return crypto_register_algs(algs, ARRAY_SIZE(algs));
        }
        pr_info("sparc64 camellia opcodes not available.\n");
        return -ENODEV;
}

static void __exit camellia_sparc64_mod_fini(void)
{
        crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS_CRYPTO("camellia");

#include "crop_devid.c"