/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

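/*
 * Each supported key size has its own set of assembler entry points
 * (the aes_sparc64_*_{128,192,256} routines in aes_asm.S); aes_ops
 * bundles them so the glue code can pick the right set once at setkey
 * time and dispatch indirectly afterwards.
 */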
struct aes_ops {
        void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
        void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
        void (*load_encrypt_keys)(const u64 *key);
        void (*load_decrypt_keys)(const u64 *key);
        void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len);
        void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len);
        void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len, u64 *iv);
        void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len, u64 *iv);
        void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
                          unsigned int len, u64 *iv);
};

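/*
 * Per-tfm context: the ops table selected for this key size plus the
 * expanded key schedule, stored as 64-bit words in the layout the
 * assembler routines expect.
 */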
struct crypto_sparc64_aes_ctx {
        struct aes_ops *ops;
        u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
        u32 key_length;
        u32 expanded_key_length;
};

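/*
 * Assembler entry points, one per key size.  The bulk ECB/CBC/CTR
 * helpers appear to rely on the round keys having been loaded into
 * the FPU registers by a prior load_*_keys() call; the glue code
 * below always pairs them that way.
 */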
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
                                    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
                                    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
                                      u64 *output, unsigned int len,
                                      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
                                      u64 *output, unsigned int len,
                                      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
                                      u64 *output, unsigned int len,
                                      u64 *iv);

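/* Dispatch tables wiring the generic ops to the key-size-specific code. */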
struct aes_ops aes128_ops = {
        .encrypt                = aes_sparc64_encrypt_128,
        .decrypt                = aes_sparc64_decrypt_128,
        .load_encrypt_keys      = aes_sparc64_load_encrypt_keys_128,
        .load_decrypt_keys      = aes_sparc64_load_decrypt_keys_128,
        .ecb_encrypt            = aes_sparc64_ecb_encrypt_128,
        .ecb_decrypt            = aes_sparc64_ecb_decrypt_128,
        .cbc_encrypt            = aes_sparc64_cbc_encrypt_128,
        .cbc_decrypt            = aes_sparc64_cbc_decrypt_128,
        .ctr_crypt              = aes_sparc64_ctr_crypt_128,
};

struct aes_ops aes192_ops = {
        .encrypt                = aes_sparc64_encrypt_192,
        .decrypt                = aes_sparc64_decrypt_192,
        .load_encrypt_keys      = aes_sparc64_load_encrypt_keys_192,
        .load_decrypt_keys      = aes_sparc64_load_decrypt_keys_192,
        .ecb_encrypt            = aes_sparc64_ecb_encrypt_192,
        .ecb_decrypt            = aes_sparc64_ecb_decrypt_192,
        .cbc_encrypt            = aes_sparc64_cbc_encrypt_192,
        .cbc_decrypt            = aes_sparc64_cbc_decrypt_192,
        .ctr_crypt              = aes_sparc64_ctr_crypt_192,
};

struct aes_ops aes256_ops = {
        .encrypt                = aes_sparc64_encrypt_256,
        .decrypt                = aes_sparc64_decrypt_256,
        .load_encrypt_keys      = aes_sparc64_load_encrypt_keys_256,
        .load_decrypt_keys      = aes_sparc64_load_decrypt_keys_256,
        .ecb_encrypt            = aes_sparc64_ecb_encrypt_256,
        .ecb_decrypt            = aes_sparc64_ecb_decrypt_256,
        .cbc_encrypt            = aes_sparc64_cbc_encrypt_256,
        .cbc_decrypt            = aes_sparc64_cbc_decrypt_256,
        .ctr_crypt              = aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
                                   unsigned int key_len);

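/*
 * expanded_key_length below is the size in bytes of the full round-key
 * schedule: 0xb0 = 176 (AES-128, 11 round keys), 0xd0 = 208 (AES-192,
 * 13 round keys), 0xf0 = 240 (AES-256, 15 round keys).  The decrypt
 * paths use it to locate the end of the schedule.
 */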
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;

        switch (key_len) {
        case AES_KEYSIZE_128:
                ctx->expanded_key_length = 0xb0;
                ctx->ops = &aes128_ops;
                break;

        case AES_KEYSIZE_192:
                ctx->expanded_key_length = 0xd0;
                ctx->ops = &aes192_ops;
                break;

        case AES_KEYSIZE_256:
                ctx->expanded_key_length = 0xf0;
                ctx->ops = &aes256_ops;
                break;

        default:
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
        ctx->key_length = key_len;

        return 0;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

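/* Round a byte count down to a whole number of AES blocks. */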
#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))

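/*
 * All of the bulk routines below follow the same pattern: load the
 * round keys into the FPU once, walk the scatterlists in virtually
 * mapped chunks, hand each whole-block run to the assembler, and
 * finally write 0 to %fprs so no dirty FPU state is left behind.
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared, presumably because the FPU
 * holds live key material between walk steps.
 */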
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        ctx->ops->load_encrypt_keys(&ctx->key[0]);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & AES_BLOCK_MASK;

                if (likely(block_len)) {
                        ctx->ops->ecb_encrypt(&ctx->key[0],
                                              (const u64 *)walk.src.virt.addr,
                                              (u64 *) walk.dst.virt.addr,
                                              block_len);
                }
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

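/*
 * The decrypt paths hand the assembler a pointer to the end of the
 * expanded key schedule (key_end); the inverse cipher applies the
 * round keys in reverse order, so the assembler works backwards from
 * that pointer.
 */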
static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        u64 *key_end;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        ctx->ops->load_decrypt_keys(&ctx->key[0]);
        key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & AES_BLOCK_MASK;

                if (likely(block_len)) {
                        ctx->ops->ecb_decrypt(key_end,
                                              (const u64 *) walk.src.virt.addr,
                                              (u64 *) walk.dst.virt.addr, block_len);
                }
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);

        return err;
}

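/*
 * CBC chains through walk.iv: the assembler updates the IV buffer in
 * place as it consumes blocks, so the chaining value carries over
 * correctly from one walk chunk to the next.
 */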
static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        ctx->ops->load_encrypt_keys(&ctx->key[0]);
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & AES_BLOCK_MASK;

                if (likely(block_len)) {
                        ctx->ops->cbc_encrypt(&ctx->key[0],
                                              (const u64 *)walk.src.virt.addr,
                                              (u64 *) walk.dst.virt.addr,
                                              block_len, (u64 *) walk.iv);
                }
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        u64 *key_end;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        ctx->ops->load_decrypt_keys(&ctx->key[0]);
        key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
        while ((nbytes = walk.nbytes)) {
                unsigned int block_len = nbytes & AES_BLOCK_MASK;

                if (likely(block_len)) {
                        ctx->ops->cbc_decrypt(key_end,
                                              (const u64 *) walk.src.virt.addr,
                                              (u64 *) walk.dst.virt.addr,
                                              block_len, (u64 *) walk.iv);
                }
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        fprs_write(0);

        return err;
}

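/*
 * Handle a final partial block in CTR mode: encrypt the counter block
 * into a stack keystream buffer, XOR only the remaining bytes into it,
 * copy the result out, and bump the counter.
 */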
static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
                            struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
                              keystream, AES_BLOCK_SIZE);
        crypto_xor((u8 *) keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

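/*
 * blkcipher_walk_virt_block() hands back chunks of at least one block
 * until fewer than AES_BLOCK_SIZE bytes remain, so a sub-block tail
 * can only appear once the main loop below has finished.
 */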
static int ctr_crypt(struct blkcipher_desc *desc,
                     struct scatterlist *dst, struct scatterlist *src,
                     unsigned int nbytes)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        ctx->ops->load_encrypt_keys(&ctx->key[0]);
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                unsigned int block_len = nbytes & AES_BLOCK_MASK;

                if (likely(block_len)) {
                        ctx->ops->ctr_crypt(&ctx->key[0],
                                            (const u64 *)walk.src.virt.addr,
                                            (u64 *) walk.dst.virt.addr,
                                            block_len, (u64 *) walk.iv);
                }
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        fprs_write(0);
        return err;
}

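/*
 * The single-block cipher only needs 32-bit alignment (alignmask 3);
 * the blkcipher modes pass 64-bit words to the assembler and so ask
 * for 8-byte alignment (alignmask 7).  SPARC_CR_OPCODE_PRIORITY ranks
 * these above the generic C implementations, so for example a
 * crypto_alloc_blkcipher("cbc(aes)", 0, 0) caller gets
 * "cbc-aes-sparc64" automatically when the opcodes are present.
 */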
static struct crypto_alg algs[] = { {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_sparc64_aes_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
}, {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_sparc64_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_sparc64_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}, {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_sparc64_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
} };

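/*
 * The AES opcodes are only present on some chips: check the ELF hwcap
 * crypto bit first, then read the CFR (%asr26) and test its AES
 * feature bit.
 */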
static bool __init sparc64_has_aes_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_AES))
                return false;

        return true;
}

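/*
 * Register the accelerated algorithms only when the opcodes are
 * actually available; returning -ENODEV otherwise leaves the generic
 * software AES implementations in charge.
 */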
static int __init aes_sparc64_mod_init(void)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(algs); i++)
                INIT_LIST_HEAD(&algs[i].cra_list);

        if (sparc64_has_aes_opcode()) {
                pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
                return crypto_register_algs(algs, ARRAY_SIZE(algs));
        }
        pr_info("sparc64 aes opcodes not available.\n");
        return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
        crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("AES algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS("aes");

#include "crop_devid.c"