linux/arch/sparc/crypto/camellia_glue.c
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

#define CAMELLIA_MIN_KEY_SIZE        16
#define CAMELLIA_MAX_KEY_SIZE        32
#define CAMELLIA_BLOCK_SIZE          16
#define CAMELLIA_TABLE_BYTE_LEN     272

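/*
 * Per-transform context.  Both the encryption and decryption key schedules
 * are expanded up front at setkey time, so each request can hand a
 * ready-made schedule straight to the low-level routines.
 */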
struct camellia_sparc64_ctx {
        u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        int key_len;
};

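/* Key expansion lives outside this glue file (in the sparc64 opcode
 * implementation); it fills in both schedules in a single call. */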
extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
                                        unsigned int key_len, u64 *decrypt_key);

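/* crypto_tfm setkey hook: reject anything other than 128/192/256-bit keys,
 * record the key length, and expand both key schedules. */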
static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
                            unsigned int key_len)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u32 *in_key = (const u32 *) _in_key;
        u32 *flags = &tfm->crt_flags;

        if (key_len != 16 && key_len != 24 && key_len != 32) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        ctx->key_len = key_len;

        camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
                                    key_len, &ctx->decrypt_key[0]);
        return 0;
}

extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
                                   u32 *output, unsigned int key_len);

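/* Single-block cipher entry points.  The same low-level routine handles both
 * directions; which key schedule is passed in selects encrypt vs. decrypt. */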
static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->encrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->decrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

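/* For the block-walking modes below, the key schedule is loaded once per
 * request via camellia_sparc64_load_keys() and reused for every chunk the
 * blkcipher walk produces. */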
extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

#define CAMELLIA_BLOCK_MASK     (~(CAMELLIA_BLOCK_SIZE - 1))

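/*
 * Common ECB worker.  The 3-grand-rounds variant is used for 128-bit keys,
 * the 4-grand-rounds variant for anything larger.  The walk hands back
 * virtually mapped chunks; all whole blocks in a chunk go to the low-level
 * routine in one call, and any partial tail is left to blkcipher_walk_done().
 * fprs_write(0) resets %fprs on the way out.
 */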
static int __ecb_crypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes, bool encrypt)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        ecb_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_ecb_crypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_ecb_crypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        if (encrypt)
                key = &ctx->encrypt_key[0];
        else
                key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, true);
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        return __ecb_crypt(desc, dst, src, nbytes, false);
}

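/* CBC uses distinct low-level routines for each direction and key size; the
 * IV is passed by pointer so the chaining value persists across walk steps. */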
typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

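/* CBC encryption always runs with the encryption key schedule; CBC decryption
 * below uses the decryption schedule.  Apart from that and the IV argument,
 * the walk loop mirrors __ecb_crypt(). */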
static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        key = &ctx->encrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key,
                           (u64 *) walk.iv);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct camellia_sparc64_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        int err;

        op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & CAMELLIA_BLOCK_MASK;

                if (likely(block_len)) {
                        const u64 *src64;
                        u64 *dst64;

                        src64 = (const u64 *)walk.src.virt.addr;
                        dst64 = (u64 *) walk.dst.virt.addr;
                        op(src64, dst64, block_len, key,
                           (u64 *) walk.iv);
                }
                nbytes &= CAMELLIA_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

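/*
 * Three registrations: the bare "camellia" cipher plus "ecb" and "cbc"
 * blkcipher wrappers.  SPARC_CR_OPCODE_PRIORITY is meant to rank these above
 * the generic C implementations when the opcodes are present.
 */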
static struct crypto_alg algs[] = { {
        .cra_name               = "camellia",
        .cra_driver_name        = "camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = CAMELLIA_MIN_KEY_SIZE,
                        .cia_max_keysize        = CAMELLIA_MAX_KEY_SIZE,
                        .cia_setkey             = camellia_set_key,
                        .cia_encrypt            = camellia_encrypt,
                        .cia_decrypt            = camellia_decrypt
                }
        }
}, {
        .cra_name               = "ecb(camellia)",
        .cra_driver_name        = "ecb-camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAMELLIA_MIN_KEY_SIZE,
                        .max_keysize    = CAMELLIA_MAX_KEY_SIZE,
                        .setkey         = camellia_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(camellia)",
        .cra_driver_name        = "cbc-camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAMELLIA_MIN_KEY_SIZE,
                        .max_keysize    = CAMELLIA_MAX_KEY_SIZE,
                        .setkey         = camellia_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}
};

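/* Runtime probe: the ELF hwcap must advertise the crypto unit and the CFR
 * register (%asr26) must report support for the camellia opcodes. */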
static bool __init sparc64_has_camellia_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_CAMELLIA))
                return false;

        return true;
}

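/* Register the algorithms only when the hardware probe succeeds; otherwise
 * return -ENODEV and leave camellia to whatever other implementation is
 * available. */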
static int __init camellia_sparc64_mod_init(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(algs); i++)
                INIT_LIST_HEAD(&algs[i].cra_list);

        if (sparc64_has_camellia_opcode()) {
                pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
                return crypto_register_algs(algs, ARRAY_SIZE(algs));
        }
        pr_info("sparc64 camellia opcodes not available.\n");
        return -ENODEV;
}

static void __exit camellia_sparc64_mod_fini(void)
{
        crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS("camellia");

#include "crop_devid.c"