linux/arch/s390/crypto/aes_s390.c
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *              Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include "crypt_s390.h"

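/* hardware-supported key lengths; keylen_flag is filled in aes_s390_init() */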
#define AES_KEYLEN_128          1
#define AES_KEYLEN_192          2
#define AES_KEYLEN_256          4

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;

struct s390_aes_ctx {
        u8 key[AES_MAX_KEY_SIZE];
        long enc;
        long dec;
        int key_len;
        union {
                struct crypto_blkcipher *blk;
                struct crypto_cipher *cip;
        } fallback;
};

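/* parameter block for PCC (compute XTS parameter) */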
struct pcc_param {
        u8 key[32];
        u8 tweak[16];
        u8 block[16];
        u8 bit[16];
        u8 xts[16];
};

struct s390_xts_ctx {
        u8 key[32];
        u8 pcc_key[32];
        long enc;
        long dec;
        int key_len;
        struct crypto_blkcipher *fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback is
 * required, or a negative number if the key size is not valid.
 */
static int need_fallback(unsigned int key_len)
{
        switch (key_len) {
        case 16:
                if (!(keylen_flag & AES_KEYLEN_128))
                        return 1;
                break;
        case 24:
                if (!(keylen_flag & AES_KEYLEN_192))
                        return 1;
                break;
        case 32:
                if (!(keylen_flag & AES_KEYLEN_256))
                        return 1;
                break;
        default:
                return -1;
        }
        return 0;
}

static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
                unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int ret;

        ret = need_fallback(key_len);
        if (ret < 0) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        sctx->key_len = key_len;
        if (!ret) {
                memcpy(sctx->key, in_key, key_len);
                return 0;
        }

        return setkey_fallback_cip(tfm, in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        if (unlikely(need_fallback(sctx->key_len))) {
                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
                return;
        }

        switch (sctx->key_len) {
        case 16:
                crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 24:
                crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        case 32:
                crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
                              AES_BLOCK_SIZE);
                break;
        }
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.cip)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(sctx->fallback.cip);
        sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
        .cra_name               =       "aes",
        .cra_driver_name        =       "aes-s390",
        .cra_priority           =       CRYPT_S390_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_init               =       fallback_init_cip,
        .cra_exit               =       fallback_exit_cip,
        .cra_u                  =       {
                .cipher = {
                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
                        .cia_max_keysize        =       AES_MAX_KEY_SIZE,
                        .cia_setkey             =       aes_set_key,
                        .cia_encrypt            =       aes_encrypt,
                        .cia_decrypt            =       aes_decrypt,
                }
        }
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
                unsigned int len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

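/* route a request through the software fallback tfm, preserving desc->tfm */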
static int fallback_blk_dec(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        unsigned int ret;
        struct crypto_blkcipher *tfm;
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

        tfm = desc->tfm;
        desc->tfm = sctx->fallback.blk;

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KM_AES_128_ENCRYPT;
                sctx->dec = KM_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KM_AES_192_ENCRYPT;
                sctx->dec = KM_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KM_AES_256_ENCRYPT;
                sctx->dec = KM_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}

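/* walk the scatterlists and hand complete blocks to the KM instruction */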
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
                         struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_km(func, param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(sctx->fallback.blk)) {
                pr_err("Allocating AES fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(sctx->fallback.blk);
        }

        return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(sctx->fallback.blk);
        sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
        .cra_name               =       "ecb(aes)",
        .cra_driver_name        =       "ecb-aes-s390",
        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_init               =       fallback_init_blk,
        .cra_exit               =       fallback_exit_blk,
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
                        .max_keysize            =       AES_MAX_KEY_SIZE,
                        .setkey                 =       ecb_aes_set_key,
                        .encrypt                =       ecb_aes_encrypt,
                        .decrypt                =       ecb_aes_decrypt,
                }
        }
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = need_fallback(key_len);
        if (ret > 0) {
                sctx->key_len = key_len;
                return setkey_fallback_blk(tfm, in_key, key_len);
        }

        switch (key_len) {
        case 16:
                sctx->enc = KMC_AES_128_ENCRYPT;
                sctx->dec = KMC_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMC_AES_192_ENCRYPT;
                sctx->dec = KMC_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMC_AES_256_ENCRYPT;
                sctx->dec = KMC_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}

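/* the KMC parameter block holds the chaining value (IV) followed by the key */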
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct blkcipher_walk *walk)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[AES_MAX_KEY_SIZE];
        } param;

        if (!nbytes)
                goto out;

        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
        memcpy(param.key, sctx->key, sctx->key_len);
        do {
                /* only use complete blocks */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = crypt_s390_kmc(func, &param, out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
        return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_enc(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(need_fallback(sctx->key_len)))
                return fallback_blk_dec(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
        .cra_name               =       "cbc(aes)",
        .cra_driver_name        =       "cbc-aes-s390",
        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_init               =       fallback_init_blk,
        .cra_exit               =       fallback_exit_blk,
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
                        .max_keysize            =       AES_MAX_KEY_SIZE,
                        .ivsize                 =       AES_BLOCK_SIZE,
                        .setkey                 =       cbc_aes_set_key,
                        .encrypt                =       cbc_aes_encrypt,
                        .decrypt                =       cbc_aes_decrypt,
                }
        }
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
                                   unsigned int len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        unsigned int ret;

        xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
                        CRYPTO_TFM_REQ_MASK);

        ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
        if (ret) {
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
                                CRYPTO_TFM_RES_MASK);
        }
        return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct crypto_blkcipher *tfm;
        unsigned int ret;

        tfm = desc->tfm;
        desc->tfm = xts_ctx->fallback;

        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
                struct scatterlist *dst, struct scatterlist *src,
                unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct crypto_blkcipher *tfm;
        unsigned int ret;

        tfm = desc->tfm;
        desc->tfm = xts_ctx->fallback;

        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

        desc->tfm = tfm;
        return ret;
}

static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int err;

        err = xts_check_key(tfm, in_key, key_len);
        if (err)
                return err;

        switch (key_len) {
        case 32:
                xts_ctx->enc = KM_XTS_128_ENCRYPT;
                xts_ctx->dec = KM_XTS_128_DECRYPT;
                memcpy(xts_ctx->key + 16, in_key, 16);
                memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
                break;
        case 48:
                xts_ctx->enc = 0;
                xts_ctx->dec = 0;
                err = xts_fallback_setkey(tfm, in_key, key_len);
                if (err)
                        return err;
                break;
        case 64:
                xts_ctx->enc = KM_XTS_256_ENCRYPT;
                xts_ctx->dec = KM_XTS_256_DECRYPT;
                memcpy(xts_ctx->key, in_key, 32);
                memcpy(xts_ctx->pcc_key, in_key + 32, 32);
                break;
        default:
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        xts_ctx->key_len = key_len;
        return 0;
}

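/*
 * Compute the initial XTS parameter from the tweak with PCC, then process
 * complete blocks with KM. The offset selects the 128-bit or 256-bit key
 * material within the 32-byte key buffers.
 */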
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_xts_ctx *xts_ctx,
                         struct blkcipher_walk *walk)
{
        unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        unsigned int n;
        u8 *in, *out;
        struct pcc_param pcc_param;
        struct {
                u8 key[32];
                u8 init[16];
        } xts_param;

        if (!nbytes)
                goto out;

        memset(pcc_param.block, 0, sizeof(pcc_param.block));
        memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
        memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
        memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
        memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
        ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
        if (ret < 0)
                return -EIO;

        memcpy(xts_param.key, xts_ctx->key, 32);
        memcpy(xts_param.init, pcc_param.xts, 16);
        do {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;

                ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
                if (ret < 0 || ret != n)
                        return -EIO;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
out:
        return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_encrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        if (unlikely(xts_ctx->key_len == 48))
                return xts_fallback_decrypt(desc, dst, src, nbytes);

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
        const char *name = tfm->__crt_alg->cra_name;
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(xts_ctx->fallback)) {
                pr_err("Allocating XTS fallback algorithm %s failed\n",
                       name);
                return PTR_ERR(xts_ctx->fallback);
        }
        return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

        crypto_free_blkcipher(xts_ctx->fallback);
        xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
        .cra_name               =       "xts(aes)",
        .cra_driver_name        =       "xts-aes-s390",
        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct s390_xts_ctx),
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_init               =       xts_fallback_init,
        .cra_exit               =       xts_fallback_exit,
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       2 * AES_MIN_KEY_SIZE,
                        .max_keysize            =       2 * AES_MAX_KEY_SIZE,
                        .ivsize                 =       AES_BLOCK_SIZE,
                        .setkey                 =       xts_aes_set_key,
                        .encrypt                =       xts_aes_encrypt,
                        .decrypt                =       xts_aes_decrypt,
                }
        }
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                           unsigned int key_len)
{
        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

        switch (key_len) {
        case 16:
                sctx->enc = KMCTR_AES_128_ENCRYPT;
                sctx->dec = KMCTR_AES_128_DECRYPT;
                break;
        case 24:
                sctx->enc = KMCTR_AES_192_ENCRYPT;
                sctx->dec = KMCTR_AES_192_DECRYPT;
                break;
        case 32:
                sctx->enc = KMCTR_AES_256_ENCRYPT;
                sctx->dec = KMCTR_AES_256_DECRYPT;
                break;
        }

        return aes_set_key(tfm, in_key, key_len);
}

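/*
 * Fill the counter buffer with successive counter values, one per AES block,
 * starting from the value in the first block; returns the number of bytes
 * (complete blocks, at most PAGE_SIZE) that were prepared.
 */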
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
        unsigned int i, n;

        /* only use complete blocks, max. PAGE_SIZE */
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
                memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
                       AES_BLOCK_SIZE);
                crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
        }
        return n;
}

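/*
 * CTR mode: use the shared page-sized counter buffer when ctrblk_lock is
 * uncontended so several blocks can be passed to KMCTR per call; otherwise
 * fall back to a single counter block on the stack.
 */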
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
                         struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
        unsigned int n, nbytes;
        u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
        u8 *out, *in, *ctrptr = ctrbuf;

        if (!walk->nbytes)
                return ret;

        if (spin_trylock(&ctrblk_lock))
                ctrptr = ctrblk;

        memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                while (nbytes >= AES_BLOCK_SIZE) {
                        if (ctrptr == ctrblk)
                                n = __ctrblk_init(ctrptr, nbytes);
                        else
                                n = AES_BLOCK_SIZE;
                        ret = crypt_s390_kmctr(func, sctx->key, out, in,
                                               n, ctrptr);
                        if (ret < 0 || ret != n) {
                                if (ctrptr == ctrblk)
                                        spin_unlock(&ctrblk_lock);
                                return -EIO;
                        }
                        if (n > AES_BLOCK_SIZE)
                                memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(ctrptr, AES_BLOCK_SIZE);
                        out += n;
                        in += n;
                        nbytes -= n;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        if (ctrptr == ctrblk) {
                if (nbytes)
                        memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
                else
                        memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
                spin_unlock(&ctrblk_lock);
        } else {
                if (!nbytes)
                        memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
        }
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                out = walk->dst.virt.addr;
                in = walk->src.virt.addr;
                ret = crypt_s390_kmctr(func, sctx->key, buf, in,
                                       AES_BLOCK_SIZE, ctrbuf);
                if (ret < 0 || ret != AES_BLOCK_SIZE)
                        return -EIO;
                memcpy(out, buf, nbytes);
                crypto_inc(ctrbuf, AES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
                memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
        }

        return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
        .cra_name               =       "ctr(aes)",
        .cra_driver_name        =       "ctr-aes-s390",
        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          =       1,
        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
        .cra_type               =       &crypto_blkcipher_type,
        .cra_module             =       THIS_MODULE,
        .cra_u                  =       {
                .blkcipher = {
                        .min_keysize            =       AES_MIN_KEY_SIZE,
                        .max_keysize            =       AES_MAX_KEY_SIZE,
                        .ivsize                 =       AES_BLOCK_SIZE,
                        .setkey                 =       ctr_aes_set_key,
                        .encrypt                =       ctr_aes_encrypt,
                        .decrypt                =       ctr_aes_decrypt,
                }
        }
};

static int ctr_aes_alg_reg;

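/*
 * Probe the available KM/KMC/KMCTR/XTS functions and register only the
 * algorithms the hardware supports; CTR mode additionally needs the shared
 * counter page.
 */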
static int __init aes_s390_init(void)
{
        int ret;

        if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_128;
        if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_192;
        if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
                keylen_flag |= AES_KEYLEN_256;

        if (!keylen_flag)
                return -EOPNOTSUPP;

        /* z9 109 and z9 BC/EC only support 128 bit key length */
        if (keylen_flag == AES_KEYLEN_128)
                pr_info("AES hardware acceleration is only available for"
                        " 128-bit keys\n");

        ret = crypto_register_alg(&aes_alg);
        if (ret)
                goto aes_err;

        ret = crypto_register_alg(&ecb_aes_alg);
        if (ret)
                goto ecb_aes_err;

        ret = crypto_register_alg(&cbc_aes_alg);
        if (ret)
                goto cbc_aes_err;

        if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KM_XTS_256_ENCRYPT,
                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ret = crypto_register_alg(&xts_aes_alg);
                if (ret)
                        goto xts_aes_err;
                xts_aes_alg_reg = 1;
        }

        if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
                                CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
                                CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
            crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
                                CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto ctr_aes_err;
                }
                ret = crypto_register_alg(&ctr_aes_alg);
                if (ret) {
                        free_page((unsigned long) ctrblk);
                        goto ctr_aes_err;
                }
                ctr_aes_alg_reg = 1;
        }

out:
        return ret;

ctr_aes_err:
        crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
        crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
        crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
        crypto_unregister_alg(&aes_alg);
aes_err:
        goto out;
}

static void __exit aes_s390_fini(void)
{
        if (ctr_aes_alg_reg) {
                crypto_unregister_alg(&ctr_aes_alg);
                free_page((unsigned long) ctrblk);
        }
        if (xts_aes_alg_reg)
                crypto_unregister_alg(&xts_aes_alg);
        crypto_unregister_alg(&cbc_aes_alg);
        crypto_unregister_alg(&ecb_aes_alg);
        crypto_unregister_alg(&aes_alg);
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");