linux/arch/s390/crypto/aes_s390.c
   1/*
   2 * Cryptographic API.
   3 *
   4 * s390 implementation of the AES Cipher Algorithm.
   5 *
   6 * s390 Version:
   7 *   Copyright IBM Corp. 2005, 2007
   8 *   Author(s): Jan Glauber (jang@de.ibm.com)
   9 *              Sebastian Siewior (sebastian@breakpoint.cc) SW-Fallback
  10 *
  11 * Derived from "crypto/aes_generic.c"
  12 *
  13 * This program is free software; you can redistribute it and/or modify it
  14 * under the terms of the GNU General Public License as published by the Free
  15 * Software Foundation; either version 2 of the License, or (at your option)
  16 * any later version.
  17 *
  18 */
  19
  20#define KMSG_COMPONENT "aes_s390"
  21#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
  22
  23#include <crypto/aes.h>
  24#include <crypto/algapi.h>
  25#include <linux/err.h>
  26#include <linux/module.h>
  27#include <linux/init.h>
  28#include "crypt_s390.h"
  29
  30#define AES_KEYLEN_128          1
  31#define AES_KEYLEN_192          2
  32#define AES_KEYLEN_256          4
  33
  34static u8 *ctrblk;
  35static char keylen_flag;
  36
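/*
 * Per-transform context for the plain AES, ECB, CBC and CTR transforms:
 * the raw key, the hardware function codes selected at setkey time and a
 * software fallback tfm for key sizes the hardware cannot handle. Note
 * that iv sits directly in front of key, so &iv doubles as the parameter
 * block (chaining value followed by the key) handed to the chaining
 * functions by cbc_aes_crypt().
 */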
  37struct s390_aes_ctx {
  38        u8 iv[AES_BLOCK_SIZE];
  39        u8 key[AES_MAX_KEY_SIZE];
  40        long enc;
  41        long dec;
  42        int key_len;
  43        union {
  44                struct crypto_blkcipher *blk;
  45                struct crypto_cipher *cip;
  46        } fallback;
  47};
  48
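/*
 * Parameter block layout used with the PCC instruction to compute the
 * XTS parameter: the key is followed by the tweak, a block sequence
 * number and a bit index; the instruction writes the resulting XTS
 * parameter into the last field.
 */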
  49struct pcc_param {
  50        u8 key[32];
  51        u8 tweak[16];
  52        u8 block[16];
  53        u8 bit[16];
  54        u8 xts[16];
  55};
  56
  57struct s390_xts_ctx {
  58        u8 key[32];
  59        u8 xts_param[16];
  60        struct pcc_param pcc;
  61        long enc;
  62        long dec;
  63        int key_len;
  64        struct crypto_blkcipher *fallback;
  65};
  66
  67/*
  68 * Check if the key_len is supported by the HW.
  69 * Returns 0 if it is, a positive number if it is not and the software
  70 * fallback is required, or a negative number if the key size is not valid.
  71 */
  72static int need_fallback(unsigned int key_len)
  73{
  74        switch (key_len) {
  75        case 16:
  76                if (!(keylen_flag & AES_KEYLEN_128))
  77                        return 1;
  78                break;
  79        case 24:
  80                if (!(keylen_flag & AES_KEYLEN_192))
  81                        return 1;
  82                break;
  83        case 32:
  84                if (!(keylen_flag & AES_KEYLEN_256))
  85                        return 1;
  86                break;
  87        default:
  88                return -1;
  89                break;
  90        }
  91        return 0;
  92}
  93
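/*
 * Set the key on the software fallback cipher: forward the request flags
 * of the s390 tfm to the fallback, perform the setkey and copy any result
 * flags (e.g. a bad key length indication) back to the caller's tfm.
 */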
  94static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
  95                unsigned int key_len)
  96{
  97        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
  98        int ret;
  99
 100        sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
 101        sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
 102                        CRYPTO_TFM_REQ_MASK);
 103
 104        ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
 105        if (ret) {
 106                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 107                tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
 108                                CRYPTO_TFM_RES_MASK);
 109        }
 110        return ret;
 111}
 112
 113static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 114                       unsigned int key_len)
 115{
 116        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 117        u32 *flags = &tfm->crt_flags;
 118        int ret;
 119
 120        ret = need_fallback(key_len);
 121        if (ret < 0) {
 122                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 123                return -EINVAL;
 124        }
 125
 126        sctx->key_len = key_len;
 127        if (!ret) {
 128                memcpy(sctx->key, in_key, key_len);
 129                return 0;
 130        }
 131
 132        return setkey_fallback_cip(tfm, in_key, key_len);
 133}
 134
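/*
 * Single-block encrypt/decrypt for the "aes" cipher: dispatch to the KM
 * function code that matches the key length, or to the software fallback
 * cipher when the hardware does not support this key size.
 */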
 135static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 136{
 137        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 138
 139        if (unlikely(need_fallback(sctx->key_len))) {
 140                crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
 141                return;
 142        }
 143
 144        switch (sctx->key_len) {
 145        case 16:
 146                crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
 147                              AES_BLOCK_SIZE);
 148                break;
 149        case 24:
 150                crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
 151                              AES_BLOCK_SIZE);
 152                break;
 153        case 32:
 154                crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
 155                              AES_BLOCK_SIZE);
 156                break;
 157        }
 158}
 159
 160static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
 161{
 162        const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 163
 164        if (unlikely(need_fallback(sctx->key_len))) {
 165                crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
 166                return;
 167        }
 168
 169        switch (sctx->key_len) {
 170        case 16:
 171                crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
 172                              AES_BLOCK_SIZE);
 173                break;
 174        case 24:
 175                crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
 176                              AES_BLOCK_SIZE);
 177                break;
 178        case 32:
 179                crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
 180                              AES_BLOCK_SIZE);
 181                break;
 182        }
 183}
 184
 185static int fallback_init_cip(struct crypto_tfm *tfm)
 186{
 187        const char *name = tfm->__crt_alg->cra_name;
 188        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 189
 190        sctx->fallback.cip = crypto_alloc_cipher(name, 0,
 191                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
 192
 193        if (IS_ERR(sctx->fallback.cip)) {
 194                pr_err("Allocating AES fallback algorithm %s failed\n",
 195                       name);
 196                return PTR_ERR(sctx->fallback.cip);
 197        }
 198
 199        return 0;
 200}
 201
 202static void fallback_exit_cip(struct crypto_tfm *tfm)
 203{
 204        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 205
 206        crypto_free_cipher(sctx->fallback.cip);
 207        sctx->fallback.cip = NULL;
 208}
 209
 210static struct crypto_alg aes_alg = {
 211        .cra_name               =       "aes",
 212        .cra_driver_name        =       "aes-s390",
 213        .cra_priority           =       CRYPT_S390_PRIORITY,
 214        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER |
 215                                        CRYPTO_ALG_NEED_FALLBACK,
 216        .cra_blocksize          =       AES_BLOCK_SIZE,
 217        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
 218        .cra_module             =       THIS_MODULE,
 219        .cra_init               =       fallback_init_cip,
 220        .cra_exit               =       fallback_exit_cip,
 221        .cra_u                  =       {
 222                .cipher = {
 223                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
 224                        .cia_max_keysize        =       AES_MAX_KEY_SIZE,
 225                        .cia_setkey             =       aes_set_key,
 226                        .cia_encrypt            =       aes_encrypt,
 227                        .cia_decrypt            =       aes_decrypt,
 228                }
 229        }
 230};
 231
 232static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
 233                unsigned int len)
 234{
 235        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 236        unsigned int ret;
 237
 238        sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
 239        sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
 240                        CRYPTO_TFM_REQ_MASK);
 241
 242        ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
 243        if (ret) {
 244                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 245                tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
 246                                CRYPTO_TFM_RES_MASK);
 247        }
 248        return ret;
 249}
 250
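/*
 * Run a request on the software fallback blkcipher by temporarily
 * swapping desc->tfm; the original tfm is restored before returning.
 */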
 251static int fallback_blk_dec(struct blkcipher_desc *desc,
 252                struct scatterlist *dst, struct scatterlist *src,
 253                unsigned int nbytes)
 254{
 255        unsigned int ret;
 256        struct crypto_blkcipher *tfm;
 257        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 258
 259        tfm = desc->tfm;
 260        desc->tfm = sctx->fallback.blk;
 261
 262        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
 263
 264        desc->tfm = tfm;
 265        return ret;
 266}
 267
 268static int fallback_blk_enc(struct blkcipher_desc *desc,
 269                struct scatterlist *dst, struct scatterlist *src,
 270                unsigned int nbytes)
 271{
 272        unsigned int ret;
 273        struct crypto_blkcipher *tfm;
 274        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 275
 276        tfm = desc->tfm;
 277        desc->tfm = sctx->fallback.blk;
 278
 279        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
 280
 281        desc->tfm = tfm;
 282        return ret;
 283}
 284
 285static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 286                           unsigned int key_len)
 287{
 288        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 289        int ret;
 290
 291        ret = need_fallback(key_len);
 292        if (ret > 0) {
 293                sctx->key_len = key_len;
 294                return setkey_fallback_blk(tfm, in_key, key_len);
 295        }
 296
 297        switch (key_len) {
 298        case 16:
 299                sctx->enc = KM_AES_128_ENCRYPT;
 300                sctx->dec = KM_AES_128_DECRYPT;
 301                break;
 302        case 24:
 303                sctx->enc = KM_AES_192_ENCRYPT;
 304                sctx->dec = KM_AES_192_DECRYPT;
 305                break;
 306        case 32:
 307                sctx->enc = KM_AES_256_ENCRYPT;
 308                sctx->dec = KM_AES_256_DECRYPT;
 309                break;
 310        }
 311
 312        return aes_set_key(tfm, in_key, key_len);
 313}
 314
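/*
 * ECB mode: walk the scatterlists and hand each run of complete blocks
 * to the KM instruction in a single call; any remainder is returned to
 * the blkcipher walk for the next iteration.
 */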
 315static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
 316                         struct blkcipher_walk *walk)
 317{
 318        int ret = blkcipher_walk_virt(desc, walk);
 319        unsigned int nbytes;
 320
 321        while ((nbytes = walk->nbytes)) {
 322                /* only use complete blocks */
 323                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
 324                u8 *out = walk->dst.virt.addr;
 325                u8 *in = walk->src.virt.addr;
 326
 327                ret = crypt_s390_km(func, param, out, in, n);
 328                if (ret < 0 || ret != n)
 329                        return -EIO;
 330
 331                nbytes &= AES_BLOCK_SIZE - 1;
 332                ret = blkcipher_walk_done(desc, walk, nbytes);
 333        }
 334
 335        return ret;
 336}
 337
 338static int ecb_aes_encrypt(struct blkcipher_desc *desc,
 339                           struct scatterlist *dst, struct scatterlist *src,
 340                           unsigned int nbytes)
 341{
 342        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 343        struct blkcipher_walk walk;
 344
 345        if (unlikely(need_fallback(sctx->key_len)))
 346                return fallback_blk_enc(desc, dst, src, nbytes);
 347
 348        blkcipher_walk_init(&walk, dst, src, nbytes);
 349        return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
 350}
 351
 352static int ecb_aes_decrypt(struct blkcipher_desc *desc,
 353                           struct scatterlist *dst, struct scatterlist *src,
 354                           unsigned int nbytes)
 355{
 356        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 357        struct blkcipher_walk walk;
 358
 359        if (unlikely(need_fallback(sctx->key_len)))
 360                return fallback_blk_dec(desc, dst, src, nbytes);
 361
 362        blkcipher_walk_init(&walk, dst, src, nbytes);
 363        return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
 364}
 365
 366static int fallback_init_blk(struct crypto_tfm *tfm)
 367{
 368        const char *name = tfm->__crt_alg->cra_name;
 369        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 370
 371        sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
 372                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
 373
 374        if (IS_ERR(sctx->fallback.blk)) {
 375                pr_err("Allocating AES fallback algorithm %s failed\n",
 376                       name);
 377                return PTR_ERR(sctx->fallback.blk);
 378        }
 379
 380        return 0;
 381}
 382
 383static void fallback_exit_blk(struct crypto_tfm *tfm)
 384{
 385        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 386
 387        crypto_free_blkcipher(sctx->fallback.blk);
 388        sctx->fallback.blk = NULL;
 389}
 390
 391static struct crypto_alg ecb_aes_alg = {
 392        .cra_name               =       "ecb(aes)",
 393        .cra_driver_name        =       "ecb-aes-s390",
 394        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
 395        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
 396                                        CRYPTO_ALG_NEED_FALLBACK,
 397        .cra_blocksize          =       AES_BLOCK_SIZE,
 398        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
 399        .cra_type               =       &crypto_blkcipher_type,
 400        .cra_module             =       THIS_MODULE,
 401        .cra_init               =       fallback_init_blk,
 402        .cra_exit               =       fallback_exit_blk,
 403        .cra_u                  =       {
 404                .blkcipher = {
 405                        .min_keysize            =       AES_MIN_KEY_SIZE,
 406                        .max_keysize            =       AES_MAX_KEY_SIZE,
 407                        .setkey                 =       ecb_aes_set_key,
 408                        .encrypt                =       ecb_aes_encrypt,
 409                        .decrypt                =       ecb_aes_decrypt,
 410                }
 411        }
 412};
 413
 414static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 415                           unsigned int key_len)
 416{
 417        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 418        int ret;
 419
 420        ret = need_fallback(key_len);
 421        if (ret > 0) {
 422                sctx->key_len = key_len;
 423                return setkey_fallback_blk(tfm, in_key, key_len);
 424        }
 425
 426        switch (key_len) {
 427        case 16:
 428                sctx->enc = KMC_AES_128_ENCRYPT;
 429                sctx->dec = KMC_AES_128_DECRYPT;
 430                break;
 431        case 24:
 432                sctx->enc = KMC_AES_192_ENCRYPT;
 433                sctx->dec = KMC_AES_192_DECRYPT;
 434                break;
 435        case 32:
 436                sctx->enc = KMC_AES_256_ENCRYPT;
 437                sctx->dec = KMC_AES_256_DECRYPT;
 438                break;
 439        }
 440
 441        return aes_set_key(tfm, in_key, key_len);
 442}
 443
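/*
 * CBC mode: copy the IV into the parameter block (chaining value followed
 * by the key, see struct s390_aes_ctx), let KMC process the complete
 * blocks of each walk step and write the final chaining value back to
 * walk->iv.
 */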
 444static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
 445                         struct blkcipher_walk *walk)
 446{
 447        int ret = blkcipher_walk_virt(desc, walk);
 448        unsigned int nbytes = walk->nbytes;
 449
 450        if (!nbytes)
 451                goto out;
 452
 453        memcpy(param, walk->iv, AES_BLOCK_SIZE);
 454        do {
 455                /* only use complete blocks */
 456                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
 457                u8 *out = walk->dst.virt.addr;
 458                u8 *in = walk->src.virt.addr;
 459
 460                ret = crypt_s390_kmc(func, param, out, in, n);
 461                if (ret < 0 || ret != n)
 462                        return -EIO;
 463
 464                nbytes &= AES_BLOCK_SIZE - 1;
 465                ret = blkcipher_walk_done(desc, walk, nbytes);
 466        } while ((nbytes = walk->nbytes));
 467        memcpy(walk->iv, param, AES_BLOCK_SIZE);
 468
 469out:
 470        return ret;
 471}
 472
 473static int cbc_aes_encrypt(struct blkcipher_desc *desc,
 474                           struct scatterlist *dst, struct scatterlist *src,
 475                           unsigned int nbytes)
 476{
 477        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 478        struct blkcipher_walk walk;
 479
 480        if (unlikely(need_fallback(sctx->key_len)))
 481                return fallback_blk_enc(desc, dst, src, nbytes);
 482
 483        blkcipher_walk_init(&walk, dst, src, nbytes);
 484        return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
 485}
 486
 487static int cbc_aes_decrypt(struct blkcipher_desc *desc,
 488                           struct scatterlist *dst, struct scatterlist *src,
 489                           unsigned int nbytes)
 490{
 491        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 492        struct blkcipher_walk walk;
 493
 494        if (unlikely(need_fallback(sctx->key_len)))
 495                return fallback_blk_dec(desc, dst, src, nbytes);
 496
 497        blkcipher_walk_init(&walk, dst, src, nbytes);
 498        return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
 499}
 500
 501static struct crypto_alg cbc_aes_alg = {
 502        .cra_name               =       "cbc(aes)",
 503        .cra_driver_name        =       "cbc-aes-s390",
 504        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
 505        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
 506                                        CRYPTO_ALG_NEED_FALLBACK,
 507        .cra_blocksize          =       AES_BLOCK_SIZE,
 508        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
 509        .cra_type               =       &crypto_blkcipher_type,
 510        .cra_module             =       THIS_MODULE,
 511        .cra_init               =       fallback_init_blk,
 512        .cra_exit               =       fallback_exit_blk,
 513        .cra_u                  =       {
 514                .blkcipher = {
 515                        .min_keysize            =       AES_MIN_KEY_SIZE,
 516                        .max_keysize            =       AES_MAX_KEY_SIZE,
 517                        .ivsize                 =       AES_BLOCK_SIZE,
 518                        .setkey                 =       cbc_aes_set_key,
 519                        .encrypt                =       cbc_aes_encrypt,
 520                        .decrypt                =       cbc_aes_decrypt,
 521                }
 522        }
 523};
 524
 525static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
 526                                   unsigned int len)
 527{
 528        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 529        unsigned int ret;
 530
 531        xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
 532        xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
 533                        CRYPTO_TFM_REQ_MASK);
 534
 535        ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
 536        if (ret) {
 537                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 538                tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
 539                                CRYPTO_TFM_RES_MASK);
 540        }
 541        return ret;
 542}
 543
 544static int xts_fallback_decrypt(struct blkcipher_desc *desc,
 545                struct scatterlist *dst, struct scatterlist *src,
 546                unsigned int nbytes)
 547{
 548        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
 549        struct crypto_blkcipher *tfm;
 550        unsigned int ret;
 551
 552        tfm = desc->tfm;
 553        desc->tfm = xts_ctx->fallback;
 554
 555        ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
 556
 557        desc->tfm = tfm;
 558        return ret;
 559}
 560
 561static int xts_fallback_encrypt(struct blkcipher_desc *desc,
 562                struct scatterlist *dst, struct scatterlist *src,
 563                unsigned int nbytes)
 564{
 565        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
 566        struct crypto_blkcipher *tfm;
 567        unsigned int ret;
 568
 569        tfm = desc->tfm;
 570        desc->tfm = xts_ctx->fallback;
 571
 572        ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
 573
 574        desc->tfm = tfm;
 575        return ret;
 576}
 577
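/*
 * XTS keys consist of two AES keys of equal size. The hardware supports
 * the 2 x 128 and 2 x 256 bit variants; 2 x 192 bit (48 byte) keys are
 * routed to the software fallback. 128 bit keys are stored in the upper
 * half of the 32 byte key fields so that key + 16 lines up with the
 * shorter parameter block format.
 */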
 578static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 579                           unsigned int key_len)
 580{
 581        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 582        u32 *flags = &tfm->crt_flags;
 583
 584        switch (key_len) {
 585        case 32:
 586                xts_ctx->enc = KM_XTS_128_ENCRYPT;
 587                xts_ctx->dec = KM_XTS_128_DECRYPT;
 588                memcpy(xts_ctx->key + 16, in_key, 16);
 589                memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
 590                break;
 591        case 48:
 592                xts_ctx->enc = 0;
 593                xts_ctx->dec = 0;
 594                xts_fallback_setkey(tfm, in_key, key_len);
 595                break;
 596        case 64:
 597                xts_ctx->enc = KM_XTS_256_ENCRYPT;
 598                xts_ctx->dec = KM_XTS_256_DECRYPT;
 599                memcpy(xts_ctx->key, in_key, 32);
 600                memcpy(xts_ctx->pcc.key, in_key + 32, 32);
 601                break;
 602        default:
 603                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 604                return -EINVAL;
 605        }
 606        xts_ctx->key_len = key_len;
 607        return 0;
 608}
 609
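/*
 * XTS mode: first derive the initial XTS parameter from the tweak key and
 * the IV via the PCC instruction, then process complete blocks with KM.
 * The offset of 0 or 16 selects the parameter block layout that matches
 * the key size (see xts_aes_set_key).
 */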
 610static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
 611                         struct s390_xts_ctx *xts_ctx,
 612                         struct blkcipher_walk *walk)
 613{
 614        unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
 615        int ret = blkcipher_walk_virt(desc, walk);
 616        unsigned int nbytes = walk->nbytes;
 617        unsigned int n;
 618        u8 *in, *out;
 619        void *param;
 620
 621        if (!nbytes)
 622                goto out;
 623
 624        memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
 625        memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
 626        memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
 627        memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
 628        param = xts_ctx->pcc.key + offset;
 629        ret = crypt_s390_pcc(func, param);
 630        if (ret < 0)
 631                return -EIO;
 632
 633        memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
 634        param = xts_ctx->key + offset;
 635        do {
 636                /* only use complete blocks */
 637                n = nbytes & ~(AES_BLOCK_SIZE - 1);
 638                out = walk->dst.virt.addr;
 639                in = walk->src.virt.addr;
 640
 641                ret = crypt_s390_km(func, param, out, in, n);
 642                if (ret < 0 || ret != n)
 643                        return -EIO;
 644
 645                nbytes &= AES_BLOCK_SIZE - 1;
 646                ret = blkcipher_walk_done(desc, walk, nbytes);
 647        } while ((nbytes = walk->nbytes));
 648out:
 649        return ret;
 650}
 651
 652static int xts_aes_encrypt(struct blkcipher_desc *desc,
 653                           struct scatterlist *dst, struct scatterlist *src,
 654                           unsigned int nbytes)
 655{
 656        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
 657        struct blkcipher_walk walk;
 658
 659        if (unlikely(xts_ctx->key_len == 48))
 660                return xts_fallback_encrypt(desc, dst, src, nbytes);
 661
 662        blkcipher_walk_init(&walk, dst, src, nbytes);
 663        return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
 664}
 665
 666static int xts_aes_decrypt(struct blkcipher_desc *desc,
 667                           struct scatterlist *dst, struct scatterlist *src,
 668                           unsigned int nbytes)
 669{
 670        struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
 671        struct blkcipher_walk walk;
 672
 673        if (unlikely(xts_ctx->key_len == 48))
 674                return xts_fallback_decrypt(desc, dst, src, nbytes);
 675
 676        blkcipher_walk_init(&walk, dst, src, nbytes);
 677        return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
 678}
 679
 680static int xts_fallback_init(struct crypto_tfm *tfm)
 681{
 682        const char *name = tfm->__crt_alg->cra_name;
 683        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 684
 685        xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
 686                        CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
 687
 688        if (IS_ERR(xts_ctx->fallback)) {
 689                pr_err("Allocating XTS fallback algorithm %s failed\n",
 690                       name);
 691                return PTR_ERR(xts_ctx->fallback);
 692        }
 693        return 0;
 694}
 695
 696static void xts_fallback_exit(struct crypto_tfm *tfm)
 697{
 698        struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
 699
 700        crypto_free_blkcipher(xts_ctx->fallback);
 701        xts_ctx->fallback = NULL;
 702}
 703
 704static struct crypto_alg xts_aes_alg = {
 705        .cra_name               =       "xts(aes)",
 706        .cra_driver_name        =       "xts-aes-s390",
 707        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
 708        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER |
 709                                        CRYPTO_ALG_NEED_FALLBACK,
 710        .cra_blocksize          =       AES_BLOCK_SIZE,
 711        .cra_ctxsize            =       sizeof(struct s390_xts_ctx),
 712        .cra_type               =       &crypto_blkcipher_type,
 713        .cra_module             =       THIS_MODULE,
 714        .cra_init               =       xts_fallback_init,
 715        .cra_exit               =       xts_fallback_exit,
 716        .cra_u                  =       {
 717                .blkcipher = {
 718                        .min_keysize            =       2 * AES_MIN_KEY_SIZE,
 719                        .max_keysize            =       2 * AES_MAX_KEY_SIZE,
 720                        .ivsize                 =       AES_BLOCK_SIZE,
 721                        .setkey                 =       xts_aes_set_key,
 722                        .encrypt                =       xts_aes_encrypt,
 723                        .decrypt                =       xts_aes_decrypt,
 724                }
 725        }
 726};
 727
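/*
 * ctr(aes) is only registered when KMCTR is available for all three key
 * lengths (see aes_s390_init), so no software fallback is wired up here.
 */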
 728static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 729                           unsigned int key_len)
 730{
 731        struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
 732
 733        switch (key_len) {
 734        case 16:
 735                sctx->enc = KMCTR_AES_128_ENCRYPT;
 736                sctx->dec = KMCTR_AES_128_DECRYPT;
 737                break;
 738        case 24:
 739                sctx->enc = KMCTR_AES_192_ENCRYPT;
 740                sctx->dec = KMCTR_AES_192_DECRYPT;
 741                break;
 742        case 32:
 743                sctx->enc = KMCTR_AES_256_ENCRYPT;
 744                sctx->dec = KMCTR_AES_256_DECRYPT;
 745                break;
 746        }
 747
 748        return aes_set_key(tfm, in_key, key_len);
 749}
 750
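/*
 * CTR mode: fill the shared ctrblk page with consecutive counter values
 * so that KMCTR can process up to PAGE_SIZE bytes per call. A final
 * partial block is encrypted into a stack buffer and only the remaining
 * bytes are copied out; the updated counter is written back to walk->iv.
 */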
 751static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
 752                         struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
 753{
 754        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
 755        unsigned int i, n, nbytes;
 756        u8 buf[AES_BLOCK_SIZE];
 757        u8 *out, *in;
 758
 759        if (!walk->nbytes)
 760                return ret;
 761
 762        memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
 763        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
 764                out = walk->dst.virt.addr;
 765                in = walk->src.virt.addr;
 766                while (nbytes >= AES_BLOCK_SIZE) {
 767                        /* only use complete blocks, max. PAGE_SIZE */
 768                        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
 769                                                 nbytes & ~(AES_BLOCK_SIZE - 1);
 770                        for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
 771                                memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
 772                                       AES_BLOCK_SIZE);
 773                                crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
 774                        }
 775                        ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
 776                        if (ret < 0 || ret != n)
 777                                return -EIO;
 778                        if (n > AES_BLOCK_SIZE)
 779                                memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
 780                                       AES_BLOCK_SIZE);
 781                        crypto_inc(ctrblk, AES_BLOCK_SIZE);
 782                        out += n;
 783                        in += n;
 784                        nbytes -= n;
 785                }
 786                ret = blkcipher_walk_done(desc, walk, nbytes);
 787        }
 788        /*
 789         * final block may be < AES_BLOCK_SIZE, copy only nbytes
 790         */
 791        if (nbytes) {
 792                out = walk->dst.virt.addr;
 793                in = walk->src.virt.addr;
 794                ret = crypt_s390_kmctr(func, sctx->key, buf, in,
 795                                       AES_BLOCK_SIZE, ctrblk);
 796                if (ret < 0 || ret != AES_BLOCK_SIZE)
 797                        return -EIO;
 798                memcpy(out, buf, nbytes);
 799                crypto_inc(ctrblk, AES_BLOCK_SIZE);
 800                ret = blkcipher_walk_done(desc, walk, 0);
 801        }
 802        memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
 803        return ret;
 804}
 805
 806static int ctr_aes_encrypt(struct blkcipher_desc *desc,
 807                           struct scatterlist *dst, struct scatterlist *src,
 808                           unsigned int nbytes)
 809{
 810        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 811        struct blkcipher_walk walk;
 812
 813        blkcipher_walk_init(&walk, dst, src, nbytes);
 814        return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
 815}
 816
 817static int ctr_aes_decrypt(struct blkcipher_desc *desc,
 818                           struct scatterlist *dst, struct scatterlist *src,
 819                           unsigned int nbytes)
 820{
 821        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
 822        struct blkcipher_walk walk;
 823
 824        blkcipher_walk_init(&walk, dst, src, nbytes);
 825        return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
 826}
 827
 828static struct crypto_alg ctr_aes_alg = {
 829        .cra_name               =       "ctr(aes)",
 830        .cra_driver_name        =       "ctr-aes-s390",
 831        .cra_priority           =       CRYPT_S390_COMPOSITE_PRIORITY,
 832        .cra_flags              =       CRYPTO_ALG_TYPE_BLKCIPHER,
 833        .cra_blocksize          =       1,
 834        .cra_ctxsize            =       sizeof(struct s390_aes_ctx),
 835        .cra_type               =       &crypto_blkcipher_type,
 836        .cra_module             =       THIS_MODULE,
 837        .cra_u                  =       {
 838                .blkcipher = {
 839                        .min_keysize            =       AES_MIN_KEY_SIZE,
 840                        .max_keysize            =       AES_MAX_KEY_SIZE,
 841                        .ivsize                 =       AES_BLOCK_SIZE,
 842                        .setkey                 =       ctr_aes_set_key,
 843                        .encrypt                =       ctr_aes_encrypt,
 844                        .decrypt                =       ctr_aes_decrypt,
 845                }
 846        }
 847};
 848
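/*
 * Probe the available hardware functions. If the machine supports AES at
 * all, the plain, ECB and CBC algorithms are registered (with a software
 * fallback for key sizes the hardware lacks); xts(aes) and ctr(aes) are
 * registered only when the KM_XTS and KMCTR functions are available. CTR
 * additionally needs a free page for the counter block buffer.
 */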
 849static int __init aes_s390_init(void)
 850{
 851        int ret;
 852
 853        if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
 854                keylen_flag |= AES_KEYLEN_128;
 855        if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
 856                keylen_flag |= AES_KEYLEN_192;
 857        if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
 858                keylen_flag |= AES_KEYLEN_256;
 859
 860        if (!keylen_flag)
 861                return -EOPNOTSUPP;
 862
 863        /* z9 109 and z9 BC/EC only support 128 bit key length */
 864        if (keylen_flag == AES_KEYLEN_128)
 865                pr_info("AES hardware acceleration is only available for"
 866                        " 128-bit keys\n");
 867
 868        ret = crypto_register_alg(&aes_alg);
 869        if (ret)
 870                goto aes_err;
 871
 872        ret = crypto_register_alg(&ecb_aes_alg);
 873        if (ret)
 874                goto ecb_aes_err;
 875
 876        ret = crypto_register_alg(&cbc_aes_alg);
 877        if (ret)
 878                goto cbc_aes_err;
 879
 880        if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
 881                        CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
 882            crypt_s390_func_available(KM_XTS_256_ENCRYPT,
 883                        CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
 884                ret = crypto_register_alg(&xts_aes_alg);
 885                if (ret)
 886                        goto xts_aes_err;
 887        }
 888
 889        if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
 890                                CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
 891            crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
 892                                CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
 893            crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
 894                                CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
 895                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
 896                if (!ctrblk) {
 897                        ret = -ENOMEM;
 898                        goto ctr_aes_err;
 899                }
 900                ret = crypto_register_alg(&ctr_aes_alg);
 901                if (ret) {
 902                        free_page((unsigned long) ctrblk);
 903                        goto ctr_aes_err;
 904                }
 905        }
 906
 907out:
 908        return ret;
 909
 910ctr_aes_err:
 911        crypto_unregister_alg(&xts_aes_alg);
 912xts_aes_err:
 913        crypto_unregister_alg(&cbc_aes_alg);
 914cbc_aes_err:
 915        crypto_unregister_alg(&ecb_aes_alg);
 916ecb_aes_err:
 917        crypto_unregister_alg(&aes_alg);
 918aes_err:
 919        goto out;
 920}
 921
 922static void __exit aes_s390_fini(void)
 923{
 924        crypto_unregister_alg(&ctr_aes_alg);
 925        free_page((unsigned long) ctrblk);
 926        crypto_unregister_alg(&xts_aes_alg);
 927        crypto_unregister_alg(&cbc_aes_alg);
 928        crypto_unregister_alg(&ecb_aes_alg);
 929        crypto_unregister_alg(&aes_alg);
 930}
 931
 932module_init(aes_s390_init);
 933module_exit(aes_s390_fini);
 934
 935MODULE_ALIAS("aes-all");
 936
 937MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
 938MODULE_LICENSE("GPL");
 939