linux/drivers/crypto/cavium/nitrox/nitrox_skcipher.c
// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

struct nitrox_cipher {
        const char *name;
        enum flexi_cipher value;
};

/*
 * supported cipher list
 */
static const struct nitrox_cipher flexi_cipher_table[] = {
        { "null",               CIPHER_NULL },
        { "cbc(des3_ede)",      CIPHER_3DES_CBC },
        { "ecb(des3_ede)",      CIPHER_3DES_ECB },
        { "cbc(aes)",           CIPHER_AES_CBC },
        { "ecb(aes)",           CIPHER_AES_ECB },
        { "cfb(aes)",           CIPHER_AES_CFB },
        { "rfc3686(ctr(aes))",  CIPHER_AES_CTR },
        { "xts(aes)",           CIPHER_AES_XTS },
        { "cts(cbc(aes))",      CIPHER_AES_CBC_CTS },
        { NULL,                 CIPHER_INVALID }
};

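/*
 * flexi_cipher_type - map a crypto API algorithm name to its flexi
 * cipher enum by walking flexi_cipher_table; returns CIPHER_INVALID
 * if the name is not in the table.
 */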
static enum flexi_cipher flexi_cipher_type(const char *name)
{
        const struct nitrox_cipher *cipher = flexi_cipher_table;

        while (cipher->name) {
                if (!strcmp(cipher->name, name))
                        break;
                cipher++;
        }
        return cipher->value;
}

static void free_src_sglist(struct skcipher_request *skreq)
{
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

        kfree(nkreq->src);
}

static void free_dst_sglist(struct skcipher_request *skreq)
{
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

        kfree(nkreq->dst);
}

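/*
 * nitrox_skcipher_callback - common completion handler: releases the
 * source/destination scatterlist buffers, maps any non-zero hardware
 * status to -EINVAL (after a ratelimited log) and completes the
 * skcipher request.
 */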
static void nitrox_skcipher_callback(void *arg, int err)
{
        struct skcipher_request *skreq = arg;

        free_src_sglist(skreq);
        free_dst_sglist(skreq);
        if (err) {
                pr_err_ratelimited("request failed status 0x%0x\n", err);
                err = -EINVAL;
        }

        skcipher_request_complete(skreq, err);
}

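/*
 * nitrox_cbc_cipher_callback - CBC completion handler. Besides the common
 * cleanup, it returns the output IV in skreq->iv: on encryption the last
 * ciphertext block is copied from the destination; on decryption it is
 * copied from the source, or, for in-place requests, from nkreq->iv_out
 * where nitrox_cbc_decrypt() saved it before the buffer was overwritten.
 */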
static void nitrox_cbc_cipher_callback(void *arg, int err)
{
        struct skcipher_request *skreq = arg;
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
        int ivsize = crypto_skcipher_ivsize(cipher);
        unsigned int start = skreq->cryptlen - ivsize;

        if (err) {
                nitrox_skcipher_callback(arg, err);
                return;
        }

        if (nkreq->creq.ctrl.s.arg == ENCRYPT) {
                scatterwalk_map_and_copy(skreq->iv, skreq->dst, start, ivsize,
                                         0);
        } else {
                if (skreq->src != skreq->dst) {
                        scatterwalk_map_and_copy(skreq->iv, skreq->src, start,
                                                 ivsize, 0);
                } else {
                        memcpy(skreq->iv, nkreq->iv_out, ivsize);
                        kfree(nkreq->iv_out);
                }
        }

        nitrox_skcipher_callback(arg, err);
}

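/*
 * nitrox_skcipher_init - tfm init: bind the transform to the first
 * available NITROX device, allocate a hardware crypto context and
 * reserve per-request space for struct nitrox_kcrypt_request.
 */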
static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
        struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
        struct crypto_ctx_hdr *chdr;

        /* get the first device */
        nctx->ndev = nitrox_get_first_device();
        if (!nctx->ndev)
                return -ENODEV;

        /* allocate nitrox crypto context */
        chdr = crypto_alloc_context(nctx->ndev);
        if (!chdr) {
                nitrox_put_device(nctx->ndev);
                return -ENOMEM;
        }

        nctx->callback = nitrox_skcipher_callback;
        nctx->chdr = chdr;
        nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
                                         sizeof(struct ctx_hdr));
        crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
                                    sizeof(struct nitrox_kcrypt_request));
        return 0;
}

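/*
 * nitrox_cbc_init - same as nitrox_skcipher_init() but installs the
 * CBC-specific completion callback that copies the output IV back.
 */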
static int nitrox_cbc_init(struct crypto_skcipher *tfm)
{
        int err;
        struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

        err = nitrox_skcipher_init(tfm);
        if (err)
                return err;

        nctx->callback = nitrox_cbc_cipher_callback;
        return 0;
}

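/*
 * nitrox_skcipher_exit - tfm exit: zeroize the cipher and auth key
 * material in the hardware context, free the context and drop the
 * device reference taken at init time.
 */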
static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
        struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

        /* free the nitrox crypto context */
        if (nctx->u.ctx_handle) {
                struct flexi_crypto_context *fctx = nctx->u.fctx;

                memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
                memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
                crypto_free_context((void *)nctx->chdr);
        }
        nitrox_put_device(nctx->ndev);

        nctx->u.ctx_handle = 0;
        nctx->ndev = NULL;
}

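/*
 * nitrox_skcipher_setkey - common setkey helper: resolve the cipher type
 * from the algorithm name, program the flexi context flags (cipher type,
 * AES key length, IV sourced from DPTR) and copy the key into the
 * hardware context.
 */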
static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
                                         int aes_keylen, const u8 *key,
                                         unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
        struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
        struct flexi_crypto_context *fctx;
        union fc_ctx_flags *flags;
        enum flexi_cipher cipher_type;
        const char *name;

        name = crypto_tfm_alg_name(tfm);
        cipher_type = flexi_cipher_type(name);
        if (unlikely(cipher_type == CIPHER_INVALID)) {
                pr_err("unsupported cipher: %s\n", name);
                return -EINVAL;
        }

        /* fill crypto context */
        fctx = nctx->u.fctx;
        flags = &fctx->flags;
        flags->f = 0;
        flags->w0.cipher_type = cipher_type;
        flags->w0.aes_keylen = aes_keylen;
        flags->w0.iv_source = IV_FROM_DPTR;
        flags->f = cpu_to_be64(*(u64 *)&flags->w0);
        /* copy the key to context */
        memcpy(fctx->crypto.u.key, key, keylen);

        return 0;
}

static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
                             unsigned int keylen)
{
        int aes_keylen;

        aes_keylen = flexi_aes_keylen(keylen);
        if (aes_keylen < 0)
                return -EINVAL;
        return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

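/*
 * alloc_src_sglist - build the input side of the request: allocate a
 * buffer for the IV plus the source scatterlist array (the extra entry
 * holds the IV) and populate it from skreq.
 */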
static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize)
{
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
        int nents = sg_nents(skreq->src) + 1;
        int ret;

        /* Allocate buffer to hold IV and input scatterlist array */
        ret = alloc_src_req_buf(nkreq, nents, ivsize);
        if (ret)
                return ret;

        nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize);
        nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,
                               skreq->cryptlen);

        return 0;
}

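/*
 * alloc_dst_sglist - build the output side of the request: allocate a
 * buffer for the ORH and COMPLETION entries along with the destination
 * scatterlist array, then set up each region in the request.
 */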
static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize)
{
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
        int nents = sg_nents(skreq->dst) + 3;
        int ret;

        /* Allocate buffer to hold ORH, COMPLETION and output scatterlist
         * array
         */
        ret = alloc_dst_req_buf(nkreq, nents);
        if (ret)
                return ret;

        nitrox_creq_set_orh(nkreq);
        nitrox_creq_set_comp(nkreq);
        nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,
                               skreq->cryptlen);

        return 0;
}

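/*
 * nitrox_skcipher_crypt - build a FLEXI_CRYPTO_ENCRYPT_HMAC request for
 * the given direction, attach the source/destination SG lists and queue
 * it on the SE hardware; completion is reported through nctx->callback.
 */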
static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
        struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
        int ivsize = crypto_skcipher_ivsize(cipher);
        struct se_crypto_request *creq;
        int ret;

        creq = &nkreq->creq;
        creq->flags = skreq->base.flags;
        creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                     GFP_KERNEL : GFP_ATOMIC;

        /* fill the request */
        creq->ctrl.value = 0;
        creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
        creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
        /* param0: length of the data to be encrypted */
        creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
        creq->gph.param1 = 0;
        /* param2: encryption data offset */
        creq->gph.param2 = cpu_to_be16(ivsize);
        creq->gph.param3 = 0;

        creq->ctx_handle = nctx->u.ctx_handle;
        creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

        ret = alloc_src_sglist(skreq, ivsize);
        if (ret)
                return ret;

        ret = alloc_dst_sglist(skreq, ivsize);
        if (ret) {
                free_src_sglist(skreq);
                return ret;
        }

        /* send the crypto request */
        return nitrox_process_se_request(nctx->ndev, creq, nctx->callback,
                                         skreq);
}

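/*
 * nitrox_cbc_decrypt - CBC decrypt entry point. For in-place requests the
 * last ciphertext block (the next IV) is saved in nkreq->iv_out before
 * the hardware overwrites the buffer; the CBC callback later copies it
 * back into skreq->iv.
 */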
static int nitrox_cbc_decrypt(struct skcipher_request *skreq)
{
        struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
        int ivsize = crypto_skcipher_ivsize(cipher);
        gfp_t flags = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
                        GFP_KERNEL : GFP_ATOMIC;
        unsigned int start = skreq->cryptlen - ivsize;

        if (skreq->src != skreq->dst)
                return nitrox_skcipher_crypt(skreq, false);

        nkreq->iv_out = kmalloc(ivsize, flags);
        if (!nkreq->iv_out)
                return -ENOMEM;

        scatterwalk_map_and_copy(nkreq->iv_out, skreq->src, start, ivsize, 0);
        return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
        return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
        return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
                              const u8 *key, unsigned int keylen)
{
        return verify_skcipher_des3_key(cipher, key) ?:
               nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
        return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
        return nitrox_skcipher_crypt(skreq, false);
}

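/*
 * nitrox_aes_xts_setkey - XTS setkey: validate the combined key, store
 * the second half (KEY2, the tweak key) in fctx->auth.u.key2 and program
 * the first half as the cipher key.
 */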
static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
        struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
        struct flexi_crypto_context *fctx;
        int aes_keylen, ret;

        ret = xts_check_key(tfm, key, keylen);
        if (ret)
                return ret;

        keylen /= 2;

        aes_keylen = flexi_aes_keylen(keylen);
        if (aes_keylen < 0)
                return -EINVAL;

        fctx = nctx->u.fctx;
        /* copy KEY2 */
        memcpy(fctx->auth.u.key2, (key + keylen), keylen);

        return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

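/*
 * nitrox_aes_ctr_rfc3686_setkey - RFC3686 CTR setkey: the last
 * CTR_RFC3686_NONCE_SIZE bytes of the key are the nonce and are stored
 * in the context IV; the remainder is programmed as the AES key.
 */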
static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
                                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
        struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
        struct flexi_crypto_context *fctx;
        int aes_keylen;

        if (keylen < CTR_RFC3686_NONCE_SIZE)
                return -EINVAL;

        fctx = nctx->u.fctx;

        memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
               CTR_RFC3686_NONCE_SIZE);

        keylen -= CTR_RFC3686_NONCE_SIZE;

        aes_keylen = flexi_aes_keylen(keylen);
        if (aes_keylen < 0)
                return -EINVAL;
        return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

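/*
 * skcipher algorithms exposed by the driver. The CBC entries use the
 * CBC-specific init/decrypt hooks so the output IV is returned to the
 * caller.
 */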
static struct skcipher_alg nitrox_skciphers[] = { {
        .base = {
                .cra_name = "cbc(aes)",
                .cra_driver_name = "n5_cbc(aes)",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = nitrox_aes_setkey,
        .encrypt = nitrox_aes_encrypt,
        .decrypt = nitrox_cbc_decrypt,
        .init = nitrox_cbc_init,
        .exit = nitrox_skcipher_exit,
}, {
        .base = {
                .cra_name = "ecb(aes)",
                .cra_driver_name = "n5_ecb(aes)",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = nitrox_aes_setkey,
        .encrypt = nitrox_aes_encrypt,
        .decrypt = nitrox_aes_decrypt,
        .init = nitrox_skcipher_init,
        .exit = nitrox_skcipher_exit,
}, {
        .base = {
                .cra_name = "cfb(aes)",
                .cra_driver_name = "n5_cfb(aes)",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = nitrox_aes_setkey,
        .encrypt = nitrox_aes_encrypt,
        .decrypt = nitrox_aes_decrypt,
        .init = nitrox_skcipher_init,
        .exit = nitrox_skcipher_exit,
}, {
        .base = {
                .cra_name = "xts(aes)",
                .cra_driver_name = "n5_xts(aes)",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = 2 * AES_MIN_KEY_SIZE,
        .max_keysize = 2 * AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = nitrox_aes_xts_setkey,
        .encrypt = nitrox_aes_encrypt,
        .decrypt = nitrox_aes_decrypt,
        .init = nitrox_skcipher_init,
        .exit = nitrox_skcipher_exit,
}, {
        .base = {
                .cra_name = "rfc3686(ctr(aes))",
                .cra_driver_name = "n5_rfc3686(ctr(aes))",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = 1,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
        .ivsize = CTR_RFC3686_IV_SIZE,
        .init = nitrox_skcipher_init,
        .exit = nitrox_skcipher_exit,
        .setkey = nitrox_aes_ctr_rfc3686_setkey,
        .encrypt = nitrox_aes_encrypt,
        .decrypt = nitrox_aes_decrypt,
}, {
        .base = {
                .cra_name = "cts(cbc(aes))",
                .cra_driver_name = "n5_cts(cbc(aes))",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = AES_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = AES_MIN_KEY_SIZE,
        .max_keysize = AES_MAX_KEY_SIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = nitrox_aes_setkey,
        .encrypt = nitrox_aes_encrypt,
        .decrypt = nitrox_aes_decrypt,
        .init = nitrox_skcipher_init,
        .exit = nitrox_skcipher_exit,
}, {
        .base = {
                .cra_name = "cbc(des3_ede)",
                .cra_driver_name = "n5_cbc(des3_ede)",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = DES3_EDE_KEY_SIZE,
        .max_keysize = DES3_EDE_KEY_SIZE,
        .ivsize = DES3_EDE_BLOCK_SIZE,
        .setkey = nitrox_3des_setkey,
        .encrypt = nitrox_3des_encrypt,
        .decrypt = nitrox_cbc_decrypt,
        .init = nitrox_cbc_init,
        .exit = nitrox_skcipher_exit,
}, {
        .base = {
                .cra_name = "ecb(des3_ede)",
                .cra_driver_name = "n5_ecb(des3_ede)",
                .cra_priority = PRIO,
                .cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
                .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                .cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
                .cra_alignmask = 0,
                .cra_module = THIS_MODULE,
        },
        .min_keysize = DES3_EDE_KEY_SIZE,
        .max_keysize = DES3_EDE_KEY_SIZE,
        .ivsize = DES3_EDE_BLOCK_SIZE,
        .setkey = nitrox_3des_setkey,
        .encrypt = nitrox_3des_encrypt,
        .decrypt = nitrox_3des_decrypt,
        .init = nitrox_skcipher_init,
        .exit = nitrox_skcipher_exit,
}
};

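/*
 * Bulk register/unregister helpers for the nitrox_skciphers table.
 */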
int nitrox_register_skciphers(void)
{
        return crypto_register_skciphers(nitrox_skciphers,
                                         ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_unregister_skciphers(void)
{
        crypto_unregister_skciphers(nitrox_skciphers,
                                    ARRAY_SIZE(nitrox_skciphers));
}