linux/arch/sparc/crypto/camellia_glue.c
// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

#define CAMELLIA_MIN_KEY_SIZE        16
#define CAMELLIA_MAX_KEY_SIZE        32
#define CAMELLIA_BLOCK_SIZE          16
#define CAMELLIA_TABLE_BYTE_LEN     272

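/* Per-transform context: the assembler key-expansion routine fills in both
 * the encryption and the decryption key schedule (CAMELLIA_TABLE_BYTE_LEN
 * bytes each), and key_len records the key size in bytes so the glue code
 * can later pick the 3- or 4-grand-rounds assembler variants.
 */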
struct camellia_sparc64_ctx {
        u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
        int key_len;
};

extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
                                        unsigned int key_len, u64 *decrypt_key);

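/* setkey: accept only 128-, 192- or 256-bit keys and expand both key
 * schedules in a single call to the sparc64 assembler helper.  The skcipher
 * variant below simply forwards to this through the underlying crypto_tfm.
 */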
static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
                            unsigned int key_len)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
        const u32 *in_key = (const u32 *) _in_key;

        if (key_len != 16 && key_len != 24 && key_len != 32)
                return -EINVAL;

        ctx->key_len = key_len;

        camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
                                    key_len, &ctx->decrypt_key[0]);
        return 0;
}

static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,
                                     const u8 *in_key, unsigned int key_len)
{
        return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
                                   u32 *output, unsigned int key_len);

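/* Single-block encrypt/decrypt used by the plain "camellia" cipher_alg.
 * The assembler routine is handed key_len so it can pick the round count,
 * and it uses the requested key schedule directly.
 */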
static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->encrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

        camellia_sparc64_crypt(&ctx->decrypt_key[0],
                               (const u32 *) src,
                               (u32 *) dst, ctx->key_len);
}

extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key);

extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

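/* ECB helper shared by ecb_encrypt()/ecb_decrypt().  128-bit keys use the
 * 3-grand-rounds assembler routine, larger keys the 4-grand-rounds one.
 * The key schedule is loaded into the FPU register file once with
 * camellia_sparc64_load_keys(), each walk chunk is then processed in
 * whole-block multiples (any partial tail is handed back to
 * skcipher_walk_done()), and fprs_write(0) clears the FPRS dirty/enable
 * bits once the walk finishes.
 */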
static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        ecb_crypt_op *op;
        const u64 *key;
        unsigned int nbytes;
        int err;

        op = camellia_sparc64_ecb_crypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_ecb_crypt_4_grand_rounds;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        if (encrypt)
                key = &ctx->encrypt_key[0];
        else
                key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes) != 0) {
                op(walk.src.virt.addr, walk.dst.virt.addr,
                   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
                err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, true);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return __ecb_crypt(req, false);
}

typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
                          const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

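/* CBC helpers: encryption walks forward with the encryption key schedule,
 * decryption uses the decryption schedule.  In both cases the assembler
 * routines chain blocks through the IV buffer provided by the walk
 * (walk.iv) and write the updated IV back so successive walk chunks
 * continue correctly.
 */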
static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        unsigned int nbytes;
        int err;

        op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        key = &ctx->encrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes) != 0) {
                op(walk.src.virt.addr, walk.dst.virt.addr,
                   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
                err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        cbc_crypt_op *op;
        const u64 *key;
        unsigned int nbytes;
        int err;

        op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
        if (ctx->key_len != 16)
                op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        key = &ctx->decrypt_key[0];
        camellia_sparc64_load_keys(key, ctx->key_len);
        while ((nbytes = walk.nbytes) != 0) {
                op(walk.src.virt.addr, walk.dst.virt.addr,
                   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
                err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

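/* Algorithm registrations: a plain single-block "camellia" cipher plus
 * ecb/cbc skcipher implementations.  SPARC_CR_OPCODE_PRIORITY makes these
 * preferred over the generic C implementations, and the alignmasks match
 * the parameter types of the assembler routines above (u32 pointers for
 * the single-block path, u64 pointers for the bulk paths).
 */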
static struct crypto_alg cipher_alg = {
        .cra_name               = "camellia",
        .cra_driver_name        = "camellia-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = CAMELLIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct camellia_sparc64_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = CAMELLIA_MIN_KEY_SIZE,
                        .cia_max_keysize        = CAMELLIA_MAX_KEY_SIZE,
                        .cia_setkey             = camellia_set_key,
                        .cia_encrypt            = camellia_encrypt,
                        .cia_decrypt            = camellia_decrypt
                }
        }
};

static struct skcipher_alg skcipher_algs[] = {
        {
                .base.cra_name          = "ecb(camellia)",
                .base.cra_driver_name   = "ecb-camellia-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_sparc64_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .setkey                 = camellia_set_key_skcipher,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "cbc(camellia)",
                .base.cra_driver_name   = "cbc-camellia-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct camellia_sparc64_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize            = CAMELLIA_MAX_KEY_SIZE,
                .ivsize                 = CAMELLIA_BLOCK_SIZE,
                .setkey                 = camellia_set_key_skcipher,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }
};

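/* Runtime detection: the accelerated paths are only offered when the ELF
 * hwcap announces the crypto unit and the CFR register (%asr26) has the
 * CFR_CAMELLIA bit set.
 */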
static bool __init sparc64_has_camellia_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_CAMELLIA))
                return false;

        return true;
}

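/* Module init: bail out with -ENODEV on CPUs without the camellia opcodes,
 * otherwise register the single-block cipher first and then the ecb/cbc
 * skciphers, unwinding the cipher registration if the latter fails.  The
 * exit path unregisters everything again.
 */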
static int __init camellia_sparc64_mod_init(void)
{
        int err;

        if (!sparc64_has_camellia_opcode()) {
                pr_info("sparc64 camellia opcodes not available.\n");
                return -ENODEV;
        }
        pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
        err = crypto_register_alg(&cipher_alg);
        if (err)
                return err;
        err = crypto_register_skciphers(skcipher_algs,
                                        ARRAY_SIZE(skcipher_algs));
        if (err)
                crypto_unregister_alg(&cipher_alg);
        return err;
}

static void __exit camellia_sparc64_mod_fini(void)
{
        crypto_unregister_alg(&cipher_alg);
        crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS_CRYPTO("camellia");

#include "crop_devid.c"

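/*
 * Illustrative sketch only (not part of this driver): roughly how a kernel
 * caller might exercise the "cbc(camellia)" skcipher registered above via
 * the generic crypto API.  The function name, the all-zero key/IV/data and
 * the single-block length are made up for the example, and it assumes
 * <linux/err.h>, <linux/slab.h> and <linux/scatterlist.h> are available.
 * Marked __maybe_unused so it stays out of the way if left in the file.
 */
static int __maybe_unused camellia_cbc_usage_sketch(void)
{
        const u8 key[16] = { 0 };               /* example 128-bit key */
        u8 iv[CAMELLIA_BLOCK_SIZE] = { 0 };     /* example IV */
        struct crypto_skcipher *tfm;
        struct skcipher_request *req = NULL;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        u8 *buf = NULL;
        int err;

        /* Picks the highest-priority "cbc(camellia)" implementation; on
         * sparc64 with the crypto opcodes that is cbc-camellia-sparc64
         * from this file. */
        tfm = crypto_alloc_skcipher("cbc(camellia)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, sizeof(key));
        if (err)
                goto out;

        buf = kzalloc(CAMELLIA_BLOCK_SIZE, GFP_KERNEL);
        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!buf || !req) {
                err = -ENOMEM;
                goto out;
        }

        /* One block encrypted in place; src == dst is allowed. */
        sg_init_one(&sg, buf, CAMELLIA_BLOCK_SIZE);
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &wait);
        skcipher_request_set_crypt(req, &sg, &sg, CAMELLIA_BLOCK_SIZE, iv);

        /* This driver completes synchronously, but waiting through
         * crypto_wait_req() is the portable pattern for any skcipher. */
        err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

out:
        skcipher_request_free(req);
        kfree(buf);
        crypto_free_skcipher(tfm);
        return err;
}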