linux/arch/s390/crypto/paes_s390.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017,2020
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *              Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/mutex.h>
#include <linux/spinlock.h>
#include <linux/delay.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

/*
 * Key blobs smaller/bigger than these defines are rejected
 * by the common code even before the individual setkey function
 * is called. As paes can handle different kinds of key blobs
 * and padding is also possible, the limits need to be generous.
 */
#define PAES_MIN_KEYSIZE 16
#define PAES_MAX_KEYSIZE 320

static u8 *ctrblk;
static DEFINE_MUTEX(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
        /*
         * Small keys will be stored in the keybuf. Larger keys are
         * stored in extra allocated memory. In both cases, key
         * points to the memory where the key is stored. The code
         * distinguishes the two cases by checking keylen against
         * sizeof(keybuf); see the two helper functions below.
         */
        u8 *key;
        u8 keybuf[128];
        unsigned int keylen;
};

static inline int _key_to_kb(struct key_blob *kb,
                             const u8 *key,
                             unsigned int keylen)
{
        struct clearkey_header {
                u8  type;
                u8  res0[3];
                u8  version;
                u8  res1[3];
                u32 keytype;
                u32 len;
        } __packed * h;

        switch (keylen) {
        case 16:
        case 24:
        case 32:
                /* clear key value, prepare pkey clear key token in keybuf */
                memset(kb->keybuf, 0, sizeof(kb->keybuf));
                h = (struct clearkey_header *) kb->keybuf;
                h->version = 0x02; /* TOKVER_CLEAR_KEY */
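                /*
                 * A clear key length of 16/24/32 bytes maps to the
                 * keytype values 1/2/3, i.e.
                 * PKEY_KEYTYPE_AES_128/192/256.
                 */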
                h->keytype = (keylen - 8) >> 3;
                h->len = keylen;
                memcpy(kb->keybuf + sizeof(*h), key, keylen);
                kb->keylen = sizeof(*h) + keylen;
                kb->key = kb->keybuf;
                break;
        default:
                /* other key material, let pkey handle this */
                if (keylen <= sizeof(kb->keybuf))
                        kb->key = kb->keybuf;
                else {
                        kb->key = kmalloc(keylen, GFP_KERNEL);
                        if (!kb->key)
                                return -ENOMEM;
                }
                memcpy(kb->key, key, keylen);
                kb->keylen = keylen;
                break;
        }

        return 0;
}

static inline void _free_kb_keybuf(struct key_blob *kb)
{
        if (kb->key && kb->key != kb->keybuf
            && kb->keylen > sizeof(kb->keybuf)) {
                kfree(kb->key);
                kb->key = NULL;
        }
}

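/*
 * Per-tfm context: the raw key material (key blob), the protected
 * key derived from it and the CPACF function code selected for the
 * key type. The protected key is re-derived from the key blob
 * whenever CPACF reports it as no longer usable.
 */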
struct s390_paes_ctx {
        struct key_blob kb;
        struct pkey_protkey pk;
        spinlock_t pk_lock;
        unsigned long fc;
};

struct s390_pxts_ctx {
        struct key_blob kb[2];
        struct pkey_protkey pk[2];
        spinlock_t pk_lock;
        unsigned long fc;
};

static inline int __paes_keyblob2pkey(struct key_blob *kb,
                                     struct pkey_protkey *pk)
{
        int i, ret;

        /* try three times in case of failure */
        for (i = 0; i < 3; i++) {
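                /*
                 * -EAGAIN indicates a transient condition in the
                 * pkey backend, e.g. a temporarily unavailable
                 * crypto card; back off for a second before the
                 * retry, but only in task context.
                 */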
                if (i > 0 && ret == -EAGAIN && in_task())
                        if (msleep_interruptible(1000))
                                return -EINTR;
                ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
                if (ret == 0)
                        break;
        }

        return ret;
}

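/*
 * Convert the key blob into a protected key and update the cached
 * copy in the context. The bh-disabling spinlock serializes the
 * update against readers in the crypt paths.
 */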
static inline int __paes_convert_key(struct s390_paes_ctx *ctx)
{
        int ret;
        struct pkey_protkey pkey;

        ret = __paes_keyblob2pkey(&ctx->kb, &pkey);
        if (ret)
                return ret;

        spin_lock_bh(&ctx->pk_lock);
        memcpy(&ctx->pk, &pkey, sizeof(pkey));
        spin_unlock_bh(&ctx->pk_lock);

        return 0;
}

static int ecb_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb.key = NULL;
        spin_lock_init(&ctx->pk_lock);

        return 0;
}

static void ecb_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
}

static inline int __ecb_paes_set_key(struct s390_paes_ctx *ctx)
{
        int rc;
        unsigned long fc;

        rc = __paes_convert_key(ctx);
        if (rc)
                return rc;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        int rc;
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
        rc = _key_to_kb(&ctx->kb, in_key, key_len);
        if (rc)
                return rc;

        return __ecb_paes_set_key(ctx);
}

static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes, n, k;
        int ret;
        struct {
                u8 key[MAXPROTKEYSIZE];
        } param;

        ret = skcipher_walk_virt(&walk, req, false);
        if (ret)
                return ret;

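        /* take a consistent snapshot of the protected key */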
        spin_lock_bh(&ctx->pk_lock);
        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
        spin_unlock_bh(&ctx->pk_lock);

        while ((nbytes = walk.nbytes) != 0) {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                k = cpacf_km(ctx->fc | modifier, &param,
                             walk.dst.virt.addr, walk.src.virt.addr, n);
                if (k)
                        ret = skcipher_walk_done(&walk, nbytes - k);
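                /*
                 * A short return value (k < n) means the protected
                 * key has become invalid, e.g. because the wrapping
                 * key changed. Re-derive it from the key blob and
                 * retry with the fresh protected key.
                 */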
                if (k < n) {
                        if (__paes_convert_key(ctx))
                                return skcipher_walk_done(&walk, -EIO);
                        spin_lock_bh(&ctx->pk_lock);
                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
                        spin_unlock_bh(&ctx->pk_lock);
                }
        }
        return ret;
}

static int ecb_paes_encrypt(struct skcipher_request *req)
{
        return ecb_paes_crypt(req, 0);
}

static int ecb_paes_decrypt(struct skcipher_request *req)
{
        return ecb_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg ecb_paes_alg = {
        .base.cra_name          =       "ecb(paes)",
        .base.cra_driver_name   =       "ecb-paes-s390",
        .base.cra_priority      =       401,    /* combo: aes + ecb + 1 */
        .base.cra_blocksize     =       AES_BLOCK_SIZE,
        .base.cra_ctxsize       =       sizeof(struct s390_paes_ctx),
        .base.cra_module        =       THIS_MODULE,
        .base.cra_list          =       LIST_HEAD_INIT(ecb_paes_alg.base.cra_list),
        .init                   =       ecb_paes_init,
        .exit                   =       ecb_paes_exit,
        .min_keysize            =       PAES_MIN_KEYSIZE,
        .max_keysize            =       PAES_MAX_KEYSIZE,
        .setkey                 =       ecb_paes_set_key,
        .encrypt                =       ecb_paes_encrypt,
        .decrypt                =       ecb_paes_decrypt,
};

static int cbc_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb.key = NULL;
        spin_lock_init(&ctx->pk_lock);

        return 0;
}

static void cbc_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
}

static inline int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
        int rc;
        unsigned long fc;

        rc = __paes_convert_key(ctx);
        if (rc)
                return rc;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        int rc;
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
        rc = _key_to_kb(&ctx->kb, in_key, key_len);
        if (rc)
                return rc;

        return __cbc_paes_set_key(ctx);
}

static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes, n, k;
        int ret;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[MAXPROTKEYSIZE];
        } param;

        ret = skcipher_walk_virt(&walk, req, false);
        if (ret)
                return ret;

        memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
        spin_lock_bh(&ctx->pk_lock);
        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
        spin_unlock_bh(&ctx->pk_lock);

        while ((nbytes = walk.nbytes) != 0) {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                k = cpacf_kmc(ctx->fc | modifier, &param,
                              walk.dst.virt.addr, walk.src.virt.addr, n);
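                /*
                 * cpacf_kmc updates the chaining value in the param
                 * block; copy it back so walk.iv always holds the
                 * current IV.
                 */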
                if (k) {
                        memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
                        ret = skcipher_walk_done(&walk, nbytes - k);
                }
                if (k < n) {
                        if (__paes_convert_key(ctx))
                                return skcipher_walk_done(&walk, -EIO);
                        spin_lock_bh(&ctx->pk_lock);
                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
                        spin_unlock_bh(&ctx->pk_lock);
                }
        }
        return ret;
}

static int cbc_paes_encrypt(struct skcipher_request *req)
{
        return cbc_paes_crypt(req, 0);
}

static int cbc_paes_decrypt(struct skcipher_request *req)
{
        return cbc_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg cbc_paes_alg = {
        .base.cra_name          =       "cbc(paes)",
        .base.cra_driver_name   =       "cbc-paes-s390",
        .base.cra_priority      =       402,    /* ecb-paes-s390 + 1 */
        .base.cra_blocksize     =       AES_BLOCK_SIZE,
        .base.cra_ctxsize       =       sizeof(struct s390_paes_ctx),
        .base.cra_module        =       THIS_MODULE,
        .base.cra_list          =       LIST_HEAD_INIT(cbc_paes_alg.base.cra_list),
        .init                   =       cbc_paes_init,
        .exit                   =       cbc_paes_exit,
        .min_keysize            =       PAES_MIN_KEYSIZE,
        .max_keysize            =       PAES_MAX_KEYSIZE,
        .ivsize                 =       AES_BLOCK_SIZE,
        .setkey                 =       cbc_paes_set_key,
        .encrypt                =       cbc_paes_encrypt,
        .decrypt                =       cbc_paes_decrypt,
};

static int xts_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb[0].key = NULL;
        ctx->kb[1].key = NULL;
        spin_lock_init(&ctx->pk_lock);

        return 0;
}

static void xts_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb[0]);
        _free_kb_keybuf(&ctx->kb[1]);
}

static inline int __xts_paes_convert_key(struct s390_pxts_ctx *ctx)
{
        struct pkey_protkey pkey0, pkey1;

        if (__paes_keyblob2pkey(&ctx->kb[0], &pkey0) ||
            __paes_keyblob2pkey(&ctx->kb[1], &pkey1))
                return -EINVAL;

        spin_lock_bh(&ctx->pk_lock);
        memcpy(&ctx->pk[0], &pkey0, sizeof(pkey0));
        memcpy(&ctx->pk[1], &pkey1, sizeof(pkey1));
        spin_unlock_bh(&ctx->pk_lock);

        return 0;
}

static inline int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
        unsigned long fc;

        if (__xts_paes_convert_key(ctx))
                return -EINVAL;

        if (ctx->pk[0].type != ctx->pk[1].type)
                return -EINVAL;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
                (ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
                CPACF_KM_PXTS_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int xts_key_len)
{
        int rc;
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
        u8 ckey[2 * AES_MAX_KEY_SIZE];
        unsigned int ckey_len, key_len;

        if (xts_key_len % 2)
                return -EINVAL;

        key_len = xts_key_len / 2;

        _free_kb_keybuf(&ctx->kb[0]);
        _free_kb_keybuf(&ctx->kb[1]);
        rc = _key_to_kb(&ctx->kb[0], in_key, key_len);
        if (rc)
                return rc;
        rc = _key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
        if (rc)
                return rc;

        rc = __xts_paes_set_key(ctx);
        if (rc)
                return rc;

        /*
         * xts_verify_key verifies the key length is not odd and makes
         * sure that the two keys are not the same. This can be done
         * on the two protected keys as well.
         */
        ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
                AES_KEYSIZE_128 : AES_KEYSIZE_256;
        memcpy(ckey, ctx->pk[0].protkey, ckey_len);
        memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
        return xts_verify_key(tfm, ckey, 2*ckey_len);
}

static int xts_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int keylen, offset, nbytes, n, k;
        int ret;
        struct {
                u8 key[MAXPROTKEYSIZE]; /* key + verification pattern */
                u8 tweak[16];
                u8 block[16];
                u8 bit[16];
                u8 xts[16];
        } pcc_param;
        struct {
                u8 key[MAXPROTKEYSIZE]; /* key + verification pattern */
                u8 init[16];
        } xts_param;

        ret = skcipher_walk_virt(&walk, req, false);
        if (ret)
                return ret;

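        /*
         * A protected key is 48 bytes for AES-128 and 64 bytes for
         * AES-256 (clear key part plus the 32-byte verification
         * pattern). The shorter key is stored right-aligned at
         * offset 16 so that it ends flush with the fields that
         * follow it in the CPACF parameter block.
         */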
        keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
        offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;

        memset(&pcc_param, 0, sizeof(pcc_param));
        memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
        spin_lock_bh(&ctx->pk_lock);
        memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
        memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
        spin_unlock_bh(&ctx->pk_lock);
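        /* let PCC compute the XTS parameter from the tweak and key2 */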
        cpacf_pcc(ctx->fc, pcc_param.key + offset);
        memcpy(xts_param.init, pcc_param.xts, 16);

        while ((nbytes = walk.nbytes) != 0) {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
                             walk.dst.virt.addr, walk.src.virt.addr, n);
                if (k)
                        ret = skcipher_walk_done(&walk, nbytes - k);
                if (k < n) {
                        if (__xts_paes_convert_key(ctx))
                                return skcipher_walk_done(&walk, -EIO);
                        spin_lock_bh(&ctx->pk_lock);
                        memcpy(xts_param.key + offset,
                               ctx->pk[0].protkey, keylen);
                        spin_unlock_bh(&ctx->pk_lock);
                }
        }

        return ret;
}

static int xts_paes_encrypt(struct skcipher_request *req)
{
        return xts_paes_crypt(req, 0);
}

static int xts_paes_decrypt(struct skcipher_request *req)
{
        return xts_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg xts_paes_alg = {
        .base.cra_name          =       "xts(paes)",
        .base.cra_driver_name   =       "xts-paes-s390",
        .base.cra_priority      =       402,    /* ecb-paes-s390 + 1 */
        .base.cra_blocksize     =       AES_BLOCK_SIZE,
        .base.cra_ctxsize       =       sizeof(struct s390_pxts_ctx),
        .base.cra_module        =       THIS_MODULE,
        .base.cra_list          =       LIST_HEAD_INIT(xts_paes_alg.base.cra_list),
        .init                   =       xts_paes_init,
        .exit                   =       xts_paes_exit,
        .min_keysize            =       2 * PAES_MIN_KEYSIZE,
        .max_keysize            =       2 * PAES_MAX_KEYSIZE,
        .ivsize                 =       AES_BLOCK_SIZE,
        .setkey                 =       xts_paes_set_key,
        .encrypt                =       xts_paes_encrypt,
        .decrypt                =       xts_paes_decrypt,
};

static int ctr_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb.key = NULL;
        spin_lock_init(&ctx->pk_lock);

        return 0;
}

static void ctr_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
}

static inline int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
        int rc;
        unsigned long fc;

        rc = __paes_convert_key(ctx);
        if (rc)
                return rc;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
                CPACF_KMCTR_PAES_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        int rc;
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
        rc = _key_to_kb(&ctx->kb, in_key, key_len);
        if (rc)
                return rc;

        return __ctr_paes_set_key(ctx);
}

static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
        unsigned int i, n;

        /* only use complete blocks, max. PAGE_SIZE */
        memcpy(ctrptr, iv, AES_BLOCK_SIZE);
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
        for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
                memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
                crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
                ctrptr += AES_BLOCK_SIZE;
        }
        return n;
}

static int ctr_paes_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
        u8 buf[AES_BLOCK_SIZE], *ctrptr;
        struct skcipher_walk walk;
        unsigned int nbytes, n, k;
        int ret, locked;
        struct {
                u8 key[MAXPROTKEYSIZE];
        } param;

        ret = skcipher_walk_virt(&walk, req, false);
        if (ret)
                return ret;

        spin_lock_bh(&ctx->pk_lock);
        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
        spin_unlock_bh(&ctx->pk_lock);

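        /*
         * If the shared counter block page is available, prepare up
         * to a page of counter values and process several blocks per
         * CPACF call; otherwise fall back to one block at a time
         * using walk.iv directly.
         */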
        locked = mutex_trylock(&ctrblk_lock);

        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                n = AES_BLOCK_SIZE;
                if (nbytes >= 2*AES_BLOCK_SIZE && locked)
                        n = __ctrblk_init(ctrblk, walk.iv, nbytes);
                ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
                k = cpacf_kmctr(ctx->fc, &param, walk.dst.virt.addr,
                                walk.src.virt.addr, n, ctrptr);
                if (k) {
                        if (ctrptr == ctrblk)
                                memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                        ret = skcipher_walk_done(&walk, nbytes - k);
                }
                if (k < n) {
                        if (__paes_convert_key(ctx)) {
                                if (locked)
                                        mutex_unlock(&ctrblk_lock);
                                return skcipher_walk_done(&walk, -EIO);
                        }
                        spin_lock_bh(&ctx->pk_lock);
                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
                        spin_unlock_bh(&ctx->pk_lock);
                }
        }
        if (locked)
                mutex_unlock(&ctrblk_lock);
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                while (1) {
                        if (cpacf_kmctr(ctx->fc, &param, buf,
                                        walk.src.virt.addr, AES_BLOCK_SIZE,
                                        walk.iv) == AES_BLOCK_SIZE)
                                break;
                        if (__paes_convert_key(ctx))
                                return skcipher_walk_done(&walk, -EIO);
                        spin_lock_bh(&ctx->pk_lock);
                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
                        spin_unlock_bh(&ctx->pk_lock);
                }
                memcpy(walk.dst.virt.addr, buf, nbytes);
                crypto_inc(walk.iv, AES_BLOCK_SIZE);
                ret = skcipher_walk_done(&walk, 0);
        }

        return ret;
}

static struct skcipher_alg ctr_paes_alg = {
        .base.cra_name          =       "ctr(paes)",
        .base.cra_driver_name   =       "ctr-paes-s390",
        .base.cra_priority      =       402,    /* ecb-paes-s390 + 1 */
        .base.cra_blocksize     =       1,
        .base.cra_ctxsize       =       sizeof(struct s390_paes_ctx),
        .base.cra_module        =       THIS_MODULE,
        .base.cra_list          =       LIST_HEAD_INIT(ctr_paes_alg.base.cra_list),
        .init                   =       ctr_paes_init,
        .exit                   =       ctr_paes_exit,
        .min_keysize            =       PAES_MIN_KEYSIZE,
        .max_keysize            =       PAES_MAX_KEYSIZE,
        .ivsize                 =       AES_BLOCK_SIZE,
        .setkey                 =       ctr_paes_set_key,
        .encrypt                =       ctr_paes_crypt,
        .decrypt                =       ctr_paes_crypt,
        .chunksize              =       AES_BLOCK_SIZE,
};

static inline void __crypto_unregister_skcipher(struct skcipher_alg *alg)
{
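        /*
         * cra_list is initialized empty via LIST_HEAD_INIT and only
         * becomes non-empty through a successful registration, so
         * this skips algorithms that were never registered.
         */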
        if (!list_empty(&alg->base.cra_list))
                crypto_unregister_skcipher(alg);
}

static void paes_s390_fini(void)
{
        __crypto_unregister_skcipher(&ctr_paes_alg);
        __crypto_unregister_skcipher(&xts_paes_alg);
        __crypto_unregister_skcipher(&cbc_paes_alg);
        __crypto_unregister_skcipher(&ecb_paes_alg);
        if (ctrblk)
                free_page((unsigned long) ctrblk);
}

static int __init paes_s390_init(void)
{
        int ret;

        /* Query available functions for KM, KMC and KMCTR */
        cpacf_query(CPACF_KM, &km_functions);
        cpacf_query(CPACF_KMC, &kmc_functions);
        cpacf_query(CPACF_KMCTR, &kmctr_functions);

        if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
            cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
            cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
                ret = crypto_register_skcipher(&ecb_paes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
            cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
            cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
                ret = crypto_register_skcipher(&cbc_paes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
            cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
                ret = crypto_register_skcipher(&xts_paes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto out_err;
                }
                ret = crypto_register_skcipher(&ctr_paes_alg);
                if (ret)
                        goto out_err;
        }

        return 0;
out_err:
        paes_s390_fini();
        return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");