linux/drivers/crypto/ccree/cc_cipher.c
// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

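/*
 * Maximum number of HW descriptors queued for one skcipher request.
 * The longest sequence built in cc_cipher_process() is for XTS/ESSIV:
 * MLLI-to-SRAM bypass, key load, XEX key load, IV load, data flow and
 * next-IV readback, i.e. six descriptors.
 */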
#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher       template_u.skcipher

struct cc_user_key_info {
        u8 *key;
        dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
        enum cc_hw_crypto_key key1_slot;
        enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
        u8 slot;
        enum cc_cpp_alg alg;
};

enum cc_key_type {
        CC_UNPROTECTED_KEY,             /* User key */
        CC_HW_PROTECTED_KEY,            /* HW (FDE) key */
        CC_POLICY_PROTECTED_KEY,        /* CPP key */
        CC_INVALID_PROTECTED_KEY        /* Invalid key */
};

struct cc_cipher_ctx {
        struct cc_drvdata *drvdata;
        int keylen;
        int cipher_mode;
        int flow_mode;
        unsigned int flags;
        enum cc_key_type key_type;
        struct cc_user_key_info user;
        union {
                struct cc_hw_key_info hw;
                struct cc_cpp_key_info cpp;
        };
        struct crypto_shash *shash_tfm;
        struct crypto_skcipher *fallback_tfm;
        bool fallback_on;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

        return ctx_p->key_type;
}

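/*
 * Validate a key size against the tfm's flow mode (AES/DES/SM4) and
 * cipher mode. Sizes that are twice an AES key size are only valid
 * for the two-key XTS and ESSIV modes. Returns 0 if the size is
 * legal, -EINVAL otherwise.
 */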
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                switch (size) {
                case CC_AES_128_BIT_KEY_SIZE:
                case CC_AES_192_BIT_KEY_SIZE:
                        if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
                                return 0;
                        break;
                case CC_AES_256_BIT_KEY_SIZE:
                        return 0;
                case (CC_AES_192_BIT_KEY_SIZE * 2):
                case (CC_AES_256_BIT_KEY_SIZE * 2):
                        if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                            ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        case S_DIN_to_DES:
                if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
                        return 0;
                break;
        case S_DIN_to_SM4:
                if (size == SM4_KEY_SIZE)
                        return 0;
                break;
        default:
                break;
        }
        return -EINVAL;
}

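/*
 * Validate the request length for the configured mode: stream-like
 * modes (CTR, OFB) accept any size, CTS and XTS need at least one
 * block, and the block modes (ECB, CBC, ESSIV) need block-aligned
 * input. Returns 0 on success, -EINVAL otherwise.
 */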
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
                              unsigned int size)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                switch (ctx_p->cipher_mode) {
                case DRV_CIPHER_XTS:
                case DRV_CIPHER_CBC_CTS:
                        if (size >= AES_BLOCK_SIZE)
                                return 0;
                        break;
                case DRV_CIPHER_OFB:
                case DRV_CIPHER_CTR:
                        return 0;
                case DRV_CIPHER_ECB:
                case DRV_CIPHER_CBC:
                case DRV_CIPHER_ESSIV:
                        if (IS_ALIGNED(size, AES_BLOCK_SIZE))
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        case S_DIN_to_DES:
                if (IS_ALIGNED(size, DES_BLOCK_SIZE))
                        return 0;
                break;
        case S_DIN_to_SM4:
                switch (ctx_p->cipher_mode) {
                case DRV_CIPHER_CTR:
                        return 0;
                case DRV_CIPHER_ECB:
                case DRV_CIPHER_CBC:
                        if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
                                return 0;
                        break;
                default:
                        break;
                }
                break;
        default:
                break;
        }
        return -EINVAL;
}

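/*
 * tfm init: cache the mode/flow configuration from the registration
 * template and set up a DMA-mapped key buffer. For ESSIV we also
 * allocate a sha256 shash for deriving the second key half and,
 * optionally, a software fallback for key sizes the HW cannot handle.
 */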
static int cc_cipher_init(struct crypto_tfm *tfm)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct cc_crypto_alg *cc_alg =
                        container_of(tfm->__crt_alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        struct device *dev = drvdata_to_dev(cc_alg->drvdata);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        unsigned int fallback_req_size = 0;

        dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
                crypto_tfm_alg_name(tfm));

        ctx_p->cipher_mode = cc_alg->cipher_mode;
        ctx_p->flow_mode = cc_alg->flow_mode;
        ctx_p->drvdata = cc_alg->drvdata;

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                const char *name = crypto_tfm_alg_name(tfm);

                /* Alloc hash tfm for essiv */
                ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
                if (IS_ERR(ctx_p->shash_tfm)) {
                        dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
                        return PTR_ERR(ctx_p->shash_tfm);
                }
                max_key_buf_size <<= 1;

                /* Alloc fallback tfm for essiv when key size != 256 bit */
                ctx_p->fallback_tfm =
                        crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

                if (IS_ERR(ctx_p->fallback_tfm)) {
                        /* Note we're still allowing registration with no fallback since it's
                         * better to have most modes supported than none at all.
                         */
                        dev_warn(dev, "Error allocating fallback algo %s. Some modes may be available.\n",
                                 name);
                        ctx_p->fallback_tfm = NULL;
                } else {
                        fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
                }
        }

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct cipher_req_ctx) + fallback_req_size);

        /* Allocate key buffer, cache line aligned */
        ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
        if (!ctx_p->user.key)
                goto free_fallback;

        dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
                ctx_p->user.key);

        /* Map key buffer */
        ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
                                                  max_key_buf_size,
                                                  DMA_TO_DEVICE);
        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
                dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
                        max_key_buf_size, ctx_p->user.key);
                goto free_key;
        }
        dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
                max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

        return 0;

free_key:
        kfree(ctx_p->user.key);
free_fallback:
        crypto_free_skcipher(ctx_p->fallback_tfm);
        crypto_free_shash(ctx_p->shash_tfm);

        return -ENOMEM;
}

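/* tfm exit: release the ESSIV helpers and the DMA-mapped key buffer */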
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct cc_crypto_alg *cc_alg =
                        container_of(alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        dev_dbg(dev, "Clearing context @%p for %s\n",
                crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* Free hash tfm for essiv */
                crypto_free_shash(ctx_p->shash_tfm);
                ctx_p->shash_tfm = NULL;
                crypto_free_skcipher(ctx_p->fallback_tfm);
                ctx_p->fallback_tfm = NULL;
        }

        /* Unmap key buffer */
        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
                         DMA_TO_DEVICE);
        dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
                &ctx_p->user.key_dma_addr);

        /* Free key buffer in context */
        dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
        kfree_sensitive(ctx_p->user.key);
}

struct tdes_keys {
        u8      key1[DES_KEY_SIZE];
        u8      key2[DES_KEY_SIZE];
        u8      key3[DES_KEY_SIZE];
};

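/* Map a HW key slot number to the HW key identifier; slots 0-3 are valid */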
static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
        switch (slot_num) {
        case 0:
                return KFDE0_KEY;
        case 1:
                return KFDE1_KEY;
        case 2:
                return KFDE2_KEY;
        case 3:
                return KFDE3_KEY;
        }
        return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
        return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
        if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
                return CC_HW_PROTECTED_KEY;
        else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
                 slot_num <= CC_LAST_CPP_KEY_SLOT)
                return CC_POLICY_PROTECTED_KEY;
        else
                return CC_INVALID_PROTECTED_KEY;
}

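/*
 * "Set key" handler for the protected key (paes/psm4) algorithms. The
 * key material is not a key at all but a token (struct cc_hkey_info)
 * naming the HW or CPP key slot(s) to use; the real keys never leave
 * the hardware.
 */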
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_hkey_info hki;

        dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", key, keylen);

        /* STAT_PHASE_0: Init and sanity checks */

        /* This checks the size of the protected key token */
        if (keylen != sizeof(hki)) {
                dev_err(dev, "Unsupported protected key size %d.\n", keylen);
                return -EINVAL;
        }

        memcpy(&hki, key, keylen);

        /* The real key len for crypto op is the size of the HW key
         * referenced by the HW key slot, not the hardware key token
         */
        keylen = hki.keylen;

        if (validate_keys_sizes(ctx_p, keylen)) {
                dev_dbg(dev, "Unsupported key size %d.\n", keylen);
                return -EINVAL;
        }

        ctx_p->keylen = keylen;
        ctx_p->fallback_on = false;

        switch (cc_slot_to_key_type(hki.hw_key1)) {
        case CC_HW_PROTECTED_KEY:
                if (ctx_p->flow_mode == S_DIN_to_SM4) {
                        dev_err(dev, "Only AES HW protected keys are supported\n");
                        return -EINVAL;
                }

                ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
                if (ctx_p->hw.key1_slot == END_OF_KEYS) {
                        dev_err(dev, "Unsupported hw key1 number (%d)\n",
                                hki.hw_key1);
                        return -EINVAL;
                }

                if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
                    ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                        if (hki.hw_key1 == hki.hw_key2) {
                                dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
                                        hki.hw_key1, hki.hw_key2);
                                return -EINVAL;
                        }

                        ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
                        if (ctx_p->hw.key2_slot == END_OF_KEYS) {
                                dev_err(dev, "Unsupported hw key2 number (%d)\n",
                                        hki.hw_key2);
                                return -EINVAL;
                        }
                }

                ctx_p->key_type = CC_HW_PROTECTED_KEY;
                dev_dbg(dev, "HW protected key %d/%d set.\n",
                        ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
                break;

        case CC_POLICY_PROTECTED_KEY:
                if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
                        dev_err(dev, "CPP keys not supported in this hardware revision.\n");
                        return -EINVAL;
                }

                if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
                    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
                        dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
                        return -EINVAL;
                }

                ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
                if (ctx_p->flow_mode == S_DIN_to_AES)
                        ctx_p->cpp.alg = CC_CPP_AES;
                else /* Must be SM4 due to the sethkey registration */
                        ctx_p->cpp.alg = CC_CPP_SM4;
                ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
                dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
                        ctx_p->cpp.alg, ctx_p->cpp.slot);
                break;

        default:
                dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
                return -EINVAL;
        }

        return 0;
}

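/*
 * "Set key" handler for the standard (unprotected) algorithms: verify
 * the key, copy it into the DMA-mapped context buffer, and for ESSIV
 * derive the second key half as the sha256 digest of the user key.
 * Non-256-bit ESSIV keys are routed to the software fallback when one
 * was allocated at init time.
 */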
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
                            unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_crypto_alg *cc_alg =
                        container_of(tfm->__crt_alg, struct cc_crypto_alg,
                                     skcipher_alg.base);
        unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

        dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
                ctx_p, crypto_tfm_alg_name(tfm), keylen);
        dump_byte_array("key", key, keylen);

        /* STAT_PHASE_0: Init and sanity checks */

        if (validate_keys_sizes(ctx_p, keylen)) {
                dev_dbg(dev, "Invalid key size %d.\n", keylen);
                return -EINVAL;
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* We only support 256 bit ESSIV-CBC-AES keys */
                if (keylen != AES_KEYSIZE_256) {
                        unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;

                        if (likely(ctx_p->fallback_tfm)) {
                                ctx_p->fallback_on = true;
                                crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
                                                            CRYPTO_TFM_REQ_MASK);
                                /* Propagate the request flags to the fallback tfm */
                                crypto_skcipher_set_flags(ctx_p->fallback_tfm, flags);
                                return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
                        }

                        dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
                        return -EINVAL;
                }

                /* Internal ESSIV key buffer is double sized */
                max_key_buf_size <<= 1;
        }

        ctx_p->fallback_on = false;
        ctx_p->key_type = CC_UNPROTECTED_KEY;

        /*
         * Verify DES weak keys
         * Note that we're dropping the expanded key since the
         * HW does the expansion on its own.
         */
        if (ctx_p->flow_mode == S_DIN_to_DES) {
                if ((keylen == DES3_EDE_KEY_SIZE &&
                     verify_skcipher_des3_key(sktfm, key)) ||
                    verify_skcipher_des_key(sktfm, key)) {
                        dev_dbg(dev, "weak DES key");
                        return -EINVAL;
                }
        }

        if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
            xts_check_key(tfm, key, keylen)) {
                dev_dbg(dev, "weak XTS key");
                return -EINVAL;
        }

        /* STAT_PHASE_1: Copy key to ctx */
        dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
                                max_key_buf_size, DMA_TO_DEVICE);

        memcpy(ctx_p->user.key, key, keylen);

        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
                /* sha256 for key2 - use sw implementation */
                int err;

                err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
                                              ctx_p->user.key, keylen,
                                              ctx_p->user.key + keylen);
                if (err) {
                        dev_err(dev, "Failed to hash ESSIV key.\n");
                        return err;
                }

                keylen <<= 1;
        }
        dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
                                   max_key_buf_size, DMA_TO_DEVICE);
        ctx_p->keylen = keylen;

        dev_dbg(dev, "return safely\n");
        return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                return S_AES_to_DOUT;
        case S_DIN_to_DES:
                return S_DES_to_DOUT;
        case S_DIN_to_SM4:
                return S_SM4_to_DOUT;
        default:
                return ctx_p->flow_mode;
        }
}

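/*
 * Queue a descriptor that writes the updated cipher state (the next
 * IV) back to the request IV buffer, so a chained request can pick up
 * where this one left off. Skipped for ECB and for policy protected
 * (CPP) keys.
 */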
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
                                 struct cipher_req_ctx *req_ctx,
                                 unsigned int ivsize, struct cc_hw_desc desc[],
                                 unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = cc_out_setup_mode(ctx_p);
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

        if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
                return;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Read next IV */
                hw_desc_init(&desc[*seq_size]);
                set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                if (cipher_mode == DRV_CIPHER_CTR ||
                    cipher_mode == DRV_CIPHER_OFB) {
                        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
                } else {
                        set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
                }
                set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                /* Read next IV */
                hw_desc_init(&desc[*seq_size]);
                set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
                              NS_BIT, 1);
                set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

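/*
 * Queue a descriptor loading the IV into the cipher state for the
 * simple IV modes. XTS/ESSIV IVs are loaded together with the XEX key
 * in cc_setup_xex_state_desc(), and ECB takes no IV at all.
 */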
static void cc_setup_state_desc(struct crypto_tfm *tfm,
                                struct cipher_req_ctx *req_ctx,
                                unsigned int ivsize, unsigned int nbytes,
                                struct cc_hw_desc desc[],
                                unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                /* Load IV */
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
                             NS_BIT);
                set_cipher_config0(&desc[*seq_size], direction);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                if (cipher_mode == DRV_CIPHER_CTR ||
                    cipher_mode == DRV_CIPHER_OFB) {
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
                } else {
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
                }
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

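/*
 * For XTS/ESSIV, queue descriptors loading the second (XEX/tweak) key
 * half and the IV. For ESSIV the second half is the sha256 digest
 * derived at setkey time, hence the SHA256_DIGEST_SIZE key length.
 */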
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
                                    struct cipher_req_ctx *req_ctx,
                                    unsigned int ivsize, unsigned int nbytes,
                                    struct cc_hw_desc desc[],
                                    unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = (ctx_p->keylen / 2);
        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
        unsigned int key_offset = key_len;

        switch (cipher_mode) {
        case DRV_CIPHER_ECB:
                break;
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                if (cipher_mode == DRV_CIPHER_ESSIV)
                        key_len = SHA256_DIGEST_SIZE;

                /* load XEX key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                        set_hw_crypto_key(&desc[*seq_size],
                                          ctx_p->hw.key2_slot);
                } else {
                        set_din_type(&desc[*seq_size], DMA_DLLI,
                                     (key_dma_addr + key_offset),
                                     key_len, NS_BIT);
                }
                set_xex_data_unit_size(&desc[*seq_size], nbytes);
                set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
                set_key_size_aes(&desc[*seq_size], key_len);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
                (*seq_size)++;

                /* Load IV */
                hw_desc_init(&desc[*seq_size]);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                set_key_size_aes(&desc[*seq_size], key_len);
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
                             CC_AES_BLOCK_SIZE, NS_BIT);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
        switch (ctx_p->flow_mode) {
        case S_DIN_to_AES:
                return DIN_AES_DOUT;
        case S_DIN_to_DES:
                return DIN_DES_DOUT;
        case S_DIN_to_SM4:
                return DIN_SM4_DOUT;
        default:
                return ctx_p->flow_mode;
        }
}

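/*
 * Queue the key load descriptor. Depending on the key type this
 * either points the engine at a HW or CPP key slot or DMAs the user
 * key from the context buffer; XTS/ESSIV load only the first key half
 * here (the second is loaded by cc_setup_xex_state_desc()).
 */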
static void cc_setup_key_desc(struct crypto_tfm *tfm,
                              struct cipher_req_ctx *req_ctx,
                              unsigned int nbytes, struct cc_hw_desc desc[],
                              unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        int cipher_mode = ctx_p->cipher_mode;
        int flow_mode = ctx_p->flow_mode;
        int direction = req_ctx->gen_ctx.op_type;
        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
        unsigned int key_len = ctx_p->keylen;
        unsigned int din_size;

        switch (cipher_mode) {
        case DRV_CIPHER_CBC:
        case DRV_CIPHER_CBC_CTS:
        case DRV_CIPHER_CTR:
        case DRV_CIPHER_OFB:
        case DRV_CIPHER_ECB:
                /* Load key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);

                if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
                        /* We use the AES key size coding for all CPP algs */
                        set_key_size_aes(&desc[*seq_size], key_len);
                        set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
                        flow_mode = cc_out_flow_mode(ctx_p);
                } else {
                        if (flow_mode == S_DIN_to_AES) {
                                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                                        set_hw_crypto_key(&desc[*seq_size],
                                                          ctx_p->hw.key1_slot);
                                } else {
                                        /* CC_UNPROTECTED_KEY
                                         * Invalid keys are filtered out in
                                         * sethkey()
                                         */
                                        din_size = (key_len == 24) ?
                                                AES_MAX_KEY_SIZE : key_len;

                                        set_din_type(&desc[*seq_size], DMA_DLLI,
                                                     key_dma_addr, din_size,
                                                     NS_BIT);
                                }
                                set_key_size_aes(&desc[*seq_size], key_len);
                        } else {
                                /* DES */
                                set_din_type(&desc[*seq_size], DMA_DLLI,
                                             key_dma_addr, key_len, NS_BIT);
                                set_key_size_des(&desc[*seq_size], key_len);
                        }
                        set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                }
                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
                break;
        case DRV_CIPHER_XTS:
        case DRV_CIPHER_ESSIV:
                /* Load AES key */
                hw_desc_init(&desc[*seq_size]);
                set_cipher_mode(&desc[*seq_size], cipher_mode);
                set_cipher_config0(&desc[*seq_size], direction);
                if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
                        set_hw_crypto_key(&desc[*seq_size],
                                          ctx_p->hw.key1_slot);
                } else {
                        set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
                                     (key_len / 2), NS_BIT);
                }
                set_key_size_aes(&desc[*seq_size], (key_len / 2));
                set_flow_mode(&desc[*seq_size], flow_mode);
                set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
                (*seq_size)++;
                break;
        default:
                dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
        }
}

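/*
 * If the scatterlist was mapped as an MLLI table, queue a BYPASS
 * descriptor that copies the table from host memory into SRAM, where
 * the DMA engine expects to find it.
 */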
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
                               struct cipher_req_ctx *req_ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, void *areq,
                               struct cc_hw_desc desc[], unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);

        if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
                /* bypass */
                dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
                        &req_ctx->mlli_params.mlli_dma_addr,
                        req_ctx->mlli_params.mlli_len,
                        ctx_p->drvdata->mlli_sram_addr);
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI,
                             req_ctx->mlli_params.mlli_dma_addr,
                             req_ctx->mlli_params.mlli_len, NS_BIT);
                set_dout_sram(&desc[*seq_size],
                              ctx_p->drvdata->mlli_sram_addr,
                              req_ctx->mlli_params.mlli_len);
                set_flow_mode(&desc[*seq_size], BYPASS);
                (*seq_size)++;
        }
}

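/*
 * Queue the data processing descriptor, either direct DLLI DMA for
 * contiguous buffers or MLLI via the SRAM-resident table. The
 * sequence-end indication is raised here only for CPP keys and ECB,
 * since every other mode still queues an IV readback descriptor
 * afterwards.
 */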
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
                               struct cipher_req_ctx *req_ctx,
                               struct scatterlist *dst, struct scatterlist *src,
                               unsigned int nbytes, struct cc_hw_desc desc[],
                               unsigned int *seq_size)
{
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        unsigned int flow_mode = cc_out_flow_mode(ctx_p);
        bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
                          ctx_p->cipher_mode == DRV_CIPHER_ECB);

        /* Process */
        if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
                dev_dbg(dev, " data params addr %pad length 0x%X\n",
                        &sg_dma_address(src), nbytes);
                dev_dbg(dev, " data params addr %pad length 0x%X\n",
                        &sg_dma_address(dst), nbytes);
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
                             nbytes, NS_BIT);
                set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
                              nbytes, NS_BIT, (!last_desc ? 0 : 1));
                if (last_desc)
                        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        } else {
                hw_desc_init(&desc[*seq_size]);
                set_din_type(&desc[*seq_size], DMA_MLLI,
                             ctx_p->drvdata->mlli_sram_addr,
                             req_ctx->in_mlli_nents, NS_BIT);
                if (req_ctx->out_nents == 0) {
                        dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                                ctx_p->drvdata->mlli_sram_addr,
                                ctx_p->drvdata->mlli_sram_addr);
                        set_dout_mlli(&desc[*seq_size],
                                      ctx_p->drvdata->mlli_sram_addr,
                                      req_ctx->in_mlli_nents, NS_BIT,
                                      (!last_desc ? 0 : 1));
                } else {
                        dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
                                ctx_p->drvdata->mlli_sram_addr,
                                ctx_p->drvdata->mlli_sram_addr +
                                (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
                        set_dout_mlli(&desc[*seq_size],
                                      (ctx_p->drvdata->mlli_sram_addr +
                                       (LLI_ENTRY_BYTE_SIZE *
                                        req_ctx->in_mlli_nents)),
                                      req_ctx->out_mlli_nents, NS_BIT,
                                      (!last_desc ? 0 : 1));
                }
                if (last_desc)
                        set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

                set_flow_mode(&desc[*seq_size], flow_mode);
                (*seq_size)++;
        }
}

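/*
 * Completion callback: unmap the request buffers, hand the updated IV
 * back to the caller and free the DMA-safe IV copy. A call with
 * -EINPROGRESS is only a backlog notification, for which the request
 * stays mapped.
 */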
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
        struct skcipher_request *req = (struct skcipher_request *)cc_req;
        struct scatterlist *dst = req->dst;
        struct scatterlist *src = req->src;
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
        struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
        unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

        if (err != -EINPROGRESS) {
                /* Not a BACKLOG notification */
                cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
                memcpy(req->iv, req_ctx->iv, ivsize);
                kfree_sensitive(req_ctx->iv);
        }

        skcipher_request_complete(req, err);
}

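/*
 * Common encrypt/decrypt path: validate the request, route it to the
 * fallback tfm if one was armed at setkey time, then map the buffers,
 * build the descriptor sequence and post it to the request queue.
 */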
static int cc_cipher_process(struct skcipher_request *req,
                             enum drv_crypto_direction direction)
{
        struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
        unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
        struct scatterlist *dst = req->dst;
        struct scatterlist *src = req->src;
        unsigned int nbytes = req->cryptlen;
        void *iv = req->iv;
        struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
        struct device *dev = drvdata_to_dev(ctx_p->drvdata);
        struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
        struct cc_crypto_req cc_req = {};
        int rc;
        unsigned int seq_len = 0;
        gfp_t flags = cc_gfp_flags(&req->base);

        dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
                ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
                "Encrypt" : "Decrypt"), req, iv, nbytes);

        /* STAT_PHASE_0: Init and sanity checks */

        if (validate_data_size(ctx_p, nbytes)) {
                dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
                rc = -EINVAL;
                goto exit_process;
        }
        if (nbytes == 0) {
                /* No data to process is valid */
                rc = 0;
                goto exit_process;
        }

        if (ctx_p->fallback_on) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                *subreq = *req;
                skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
                if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
                        return crypto_skcipher_encrypt(subreq);
                else
                        return crypto_skcipher_decrypt(subreq);
        }

        /* The IV we are handed may be allocated from the stack so
         * we must copy it to a DMAable buffer before use.
         */
        req_ctx->iv = kmemdup(iv, ivsize, flags);
        if (!req_ctx->iv) {
                rc = -ENOMEM;
                goto exit_process;
        }

        /* Setup request structure */
        cc_req.user_cb = cc_cipher_complete;
        cc_req.user_arg = req;

        /* Setup CPP operation details */
        if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
                cc_req.cpp.is_cpp = true;
                cc_req.cpp.alg = ctx_p->cpp.alg;
                cc_req.cpp.slot = ctx_p->cpp.slot;
        }

        /* Setup request context */
        req_ctx->gen_ctx.op_type = direction;

        /* STAT_PHASE_1: Map buffers */

        rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
                                   req_ctx->iv, src, dst, flags);
        if (rc) {
                dev_err(dev, "map_request() failed\n");
                goto exit_process;
        }

        /* STAT_PHASE_2: Create sequence */

        /* Setup state (IV) */
        cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
        /* Setup MLLI line, if needed */
        cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
        /* Setup key */
        cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
        /* Setup state (IV and XEX key) */
        cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
        /* Data processing */
        cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
        /* Read next IV */
        cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

        /* STAT_PHASE_3: Lock HW and push sequence */

        rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
                             &req->base);
        if (rc != -EINPROGRESS && rc != -EBUSY) {
                /* Failed to send the request or request completed
                 * synchronously
                 */
                cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
        }

exit_process:
        if (rc != -EINPROGRESS && rc != -EBUSY)
                kfree_sensitive(req_ctx->iv);

        return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

        memset(req_ctx, 0, sizeof(*req_ctx));

        return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
        struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

        memset(req_ctx, 0, sizeof(*req_ctx));

        return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
        {
                .name = "xts(paes)",
                .driver_name = "xts-paes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_XTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "essiv(cbc(paes),sha256)",
                .driver_name = "essiv-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_ESSIV,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "ecb(paes)",
                .driver_name = "ecb-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = 0,
                },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "cbc(paes)",
                .driver_name = "cbc-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "ofb(paes)",
                .driver_name = "ofb-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_OFB,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "cts(cbc(paes))",
                .driver_name = "cts-cbc-paes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CBC_CTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                .name = "ctr(paes)",
                .driver_name = "ctr-paes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_sethkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = CC_HW_KEY_SIZE,
                        .max_keysize = CC_HW_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
                .sec_func = true,
        },
        {
                /* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
                 * for the reason why this differs from the generic
                 * implementation.
                 */
                .name = "xts(aes)",
                .driver_name = "xts-aes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE * 2,
                        .max_keysize = AES_MAX_KEY_SIZE * 2,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_XTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "essiv(cbc(aes),sha256)",
                .driver_name = "essiv-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_ESSIV,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_712,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ecb(aes)",
                .driver_name = "ecb-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = 0,
                },
                .cipher_mode = DRV_CIPHER_ECB,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cbc(aes)",
                .driver_name = "cbc-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CBC,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ofb(aes)",
                .driver_name = "ofb-aes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_OFB,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "cts(cbc(aes))",
                .driver_name = "cts-cbc-aes-ccree",
                .blocksize = AES_BLOCK_SIZE,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CBC_CTS,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
        {
                .name = "ctr(aes)",
                .driver_name = "ctr-aes-ccree",
                .blocksize = 1,
                .template_skcipher = {
                        .setkey = cc_cipher_setkey,
                        .encrypt = cc_cipher_encrypt,
                        .decrypt = cc_cipher_decrypt,
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                },
                .cipher_mode = DRV_CIPHER_CTR,
                .flow_mode = S_DIN_to_AES,
                .min_hw_rev = CC_HW_REV_630,
                .std_body = CC_STD_NIST,
        },
1259        {
1260                .name = "cbc(des3_ede)",
1261                .driver_name = "cbc-3des-ccree",
1262                .blocksize = DES3_EDE_BLOCK_SIZE,
1263                .template_skcipher = {
1264                        .setkey = cc_cipher_setkey,
1265                        .encrypt = cc_cipher_encrypt,
1266                        .decrypt = cc_cipher_decrypt,
1267                        .min_keysize = DES3_EDE_KEY_SIZE,
1268                        .max_keysize = DES3_EDE_KEY_SIZE,
1269                        .ivsize = DES3_EDE_BLOCK_SIZE,
1270                        },
1271                .cipher_mode = DRV_CIPHER_CBC,
1272                .flow_mode = S_DIN_to_DES,
1273                .min_hw_rev = CC_HW_REV_630,
1274                .std_body = CC_STD_NIST,
1275        },
1276        {
1277                .name = "ecb(des3_ede)",
1278                .driver_name = "ecb-3des-ccree",
1279                .blocksize = DES3_EDE_BLOCK_SIZE,
1280                .template_skcipher = {
1281                        .setkey = cc_cipher_setkey,
1282                        .encrypt = cc_cipher_encrypt,
1283                        .decrypt = cc_cipher_decrypt,
1284                        .min_keysize = DES3_EDE_KEY_SIZE,
1285                        .max_keysize = DES3_EDE_KEY_SIZE,
1286                        .ivsize = 0,
1287                        },
1288                .cipher_mode = DRV_CIPHER_ECB,
1289                .flow_mode = S_DIN_to_DES,
1290                .min_hw_rev = CC_HW_REV_630,
1291                .std_body = CC_STD_NIST,
1292        },
1293        {
1294                .name = "cbc(des)",
1295                .driver_name = "cbc-des-ccree",
1296                .blocksize = DES_BLOCK_SIZE,
1297                .template_skcipher = {
1298                        .setkey = cc_cipher_setkey,
1299                        .encrypt = cc_cipher_encrypt,
1300                        .decrypt = cc_cipher_decrypt,
1301                        .min_keysize = DES_KEY_SIZE,
1302                        .max_keysize = DES_KEY_SIZE,
1303                        .ivsize = DES_BLOCK_SIZE,
1304                        },
1305                .cipher_mode = DRV_CIPHER_CBC,
1306                .flow_mode = S_DIN_to_DES,
1307                .min_hw_rev = CC_HW_REV_630,
1308                .std_body = CC_STD_NIST,
1309        },
1310        {
1311                .name = "ecb(des)",
1312                .driver_name = "ecb-des-ccree",
1313                .blocksize = DES_BLOCK_SIZE,
1314                .template_skcipher = {
1315                        .setkey = cc_cipher_setkey,
1316                        .encrypt = cc_cipher_encrypt,
1317                        .decrypt = cc_cipher_decrypt,
1318                        .min_keysize = DES_KEY_SIZE,
1319                        .max_keysize = DES_KEY_SIZE,
1320                        .ivsize = 0,
1321                        },
1322                .cipher_mode = DRV_CIPHER_ECB,
1323                .flow_mode = S_DIN_to_DES,
1324                .min_hw_rev = CC_HW_REV_630,
1325                .std_body = CC_STD_NIST,
1326        },
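        /*
         * The SM4 (OSCCA standard body) modes below are only available on
         * CryptoCell 713 and later; cc_cipher_alloc() filters template
         * entries on .min_hw_rev and .std_body before registering them.
         */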
1327        {
1328                .name = "cbc(sm4)",
1329                .driver_name = "cbc-sm4-ccree",
1330                .blocksize = SM4_BLOCK_SIZE,
1331                .template_skcipher = {
1332                        .setkey = cc_cipher_setkey,
1333                        .encrypt = cc_cipher_encrypt,
1334                        .decrypt = cc_cipher_decrypt,
1335                        .min_keysize = SM4_KEY_SIZE,
1336                        .max_keysize = SM4_KEY_SIZE,
1337                        .ivsize = SM4_BLOCK_SIZE,
1338                        },
1339                .cipher_mode = DRV_CIPHER_CBC,
1340                .flow_mode = S_DIN_to_SM4,
1341                .min_hw_rev = CC_HW_REV_713,
1342                .std_body = CC_STD_OSCCA,
1343        },
1344        {
1345                .name = "ecb(sm4)",
1346                .driver_name = "ecb-sm4-ccree",
1347                .blocksize = SM4_BLOCK_SIZE,
1348                .template_skcipher = {
1349                        .setkey = cc_cipher_setkey,
1350                        .encrypt = cc_cipher_encrypt,
1351                        .decrypt = cc_cipher_decrypt,
1352                        .min_keysize = SM4_KEY_SIZE,
1353                        .max_keysize = SM4_KEY_SIZE,
1354                        .ivsize = 0,
1355                        },
1356                .cipher_mode = DRV_CIPHER_ECB,
1357                .flow_mode = S_DIN_to_SM4,
1358                .min_hw_rev = CC_HW_REV_713,
1359                .std_body = CC_STD_OSCCA,
1360        },
1361        {
1362                .name = "ctr(sm4)",
1363                .driver_name = "ctr-sm4-ccree",
1364                .blocksize = 1,
1365                .template_skcipher = {
1366                        .setkey = cc_cipher_setkey,
1367                        .encrypt = cc_cipher_encrypt,
1368                        .decrypt = cc_cipher_decrypt,
1369                        .min_keysize = SM4_KEY_SIZE,
1370                        .max_keysize = SM4_KEY_SIZE,
1371                        .ivsize = SM4_BLOCK_SIZE,
1372                        },
1373                .cipher_mode = DRV_CIPHER_CTR,
1374                .flow_mode = S_DIN_to_SM4,
1375                .min_hw_rev = CC_HW_REV_713,
1376                .std_body = CC_STD_OSCCA,
1377        },
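        /*
         * The "psm4" variants take a HW-protected key token through
         * cc_cipher_sethkey() rather than raw key material (note the
         * CC_HW_KEY_SIZE key size), and are marked .sec_func so they are
         * skipped when the device's security features are disabled.
         */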
1378        {
1379                .name = "cbc(psm4)",
1380                .driver_name = "cbc-psm4-ccree",
1381                .blocksize = SM4_BLOCK_SIZE,
1382                .template_skcipher = {
1383                        .setkey = cc_cipher_sethkey,
1384                        .encrypt = cc_cipher_encrypt,
1385                        .decrypt = cc_cipher_decrypt,
1386                        .min_keysize = CC_HW_KEY_SIZE,
1387                        .max_keysize = CC_HW_KEY_SIZE,
1388                        .ivsize = SM4_BLOCK_SIZE,
1389                        },
1390                .cipher_mode = DRV_CIPHER_CBC,
1391                .flow_mode = S_DIN_to_SM4,
1392                .min_hw_rev = CC_HW_REV_713,
1393                .std_body = CC_STD_OSCCA,
1394                .sec_func = true,
1395        },
1396        {
1397                .name = "ctr(psm4)",
1398                .driver_name = "ctr-psm4-ccree",
1399                .blocksize = SM4_BLOCK_SIZE,
1400                .template_skcipher = {
1401                        .setkey = cc_cipher_sethkey,
1402                        .encrypt = cc_cipher_encrypt,
1403                        .decrypt = cc_cipher_decrypt,
1404                        .min_keysize = CC_HW_KEY_SIZE,
1405                        .max_keysize = CC_HW_KEY_SIZE,
1406                        .ivsize = SM4_BLOCK_SIZE,
1407                        },
1408                .cipher_mode = DRV_CIPHER_CTR,
1409                .flow_mode = S_DIN_to_SM4,
1410                .min_hw_rev = CC_HW_REV_713,
1411                .std_body = CC_STD_OSCCA,
1412                .sec_func = true,
1413        },
1414};
1415
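/**
 * cc_create_alg() - Instantiate an algorithm descriptor from a template
 * @tmpl: entry from the skcipher_algs[] template table above
 * @dev: device used for the devm allocation
 *
 * Copies the template's skcipher ops and fills in the common crypto_alg
 * fields (priority, context size, init/exit hooks), marking the transform
 * asynchronous and kernel-driver-only.
 *
 * Return: the new cc_crypto_alg, or an ERR_PTR() on allocation failure.
 */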
1416static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
1417                                           struct device *dev)
1418{
1419        struct cc_crypto_alg *t_alg;
1420        struct skcipher_alg *alg;
1421
1422        t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
1423        if (!t_alg)
1424                return ERR_PTR(-ENOMEM);
1425
1426        alg = &t_alg->skcipher_alg;
1427
1428        memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));
1429
1430        snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
1431        snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1432                 tmpl->driver_name);
1433        alg->base.cra_module = THIS_MODULE;
1434        alg->base.cra_priority = CC_CRA_PRIO;
1435        alg->base.cra_blocksize = tmpl->blocksize;
1436        alg->base.cra_alignmask = 0;
1437        alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);
1438
1439        alg->base.cra_init = cc_cipher_init;
1440        alg->base.cra_exit = cc_cipher_exit;
1441        alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1442
1443        t_alg->cipher_mode = tmpl->cipher_mode;
1444        t_alg->flow_mode = tmpl->flow_mode;
1445
1446        return t_alg;
1447}
1448
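/**
 * cc_cipher_free() - Unregister all skciphers added by cc_cipher_alloc()
 * @drvdata: driver private data holding the algorithm list
 *
 * Walks drvdata->alg_list, unregistering and unlinking each entry. The
 * entries themselves are devm-allocated, so they are freed with the device.
 */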
1449int cc_cipher_free(struct cc_drvdata *drvdata)
1450{
1451        struct cc_crypto_alg *t_alg, *n;
1452
1453        /* Remove registered algs */
1454        list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
1455                crypto_unregister_skcipher(&t_alg->skcipher_alg);
1456                list_del(&t_alg->entry);
1457        }
1458        return 0;
1459}
1460
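/**
 * cc_cipher_alloc() - Register the skcipher algorithms this device supports
 * @drvdata: driver private data
 *
 * Skips template entries whose minimum HW revision, standard body, or
 * security-function requirements are not met by the probed device. On any
 * failure, algorithms registered so far are unwound via cc_cipher_free().
 *
 * Return: 0 on success or a negative errno.
 */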
1461int cc_cipher_alloc(struct cc_drvdata *drvdata)
1462{
1463        struct cc_crypto_alg *t_alg;
1464        struct device *dev = drvdata_to_dev(drvdata);
1465        int rc = -ENOMEM;
1466        int alg;
1467
1468        INIT_LIST_HEAD(&drvdata->alg_list);
1469
1470        /* Linux crypto */
1471        dev_dbg(dev, "Number of algorithms = %zu\n",
1472                ARRAY_SIZE(skcipher_algs));
1473        for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
1474                if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
1475                    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
1476                    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
1477                        continue;
1478
1479                dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
1480                t_alg = cc_create_alg(&skcipher_algs[alg], dev);
1481                if (IS_ERR(t_alg)) {
1482                        rc = PTR_ERR(t_alg);
1483                        dev_err(dev, "%s alg allocation failed\n",
1484                                skcipher_algs[alg].driver_name);
1485                        goto fail0;
1486                }
1487                t_alg->drvdata = drvdata;
1488
1489                dev_dbg(dev, "registering %s\n",
1490                        skcipher_algs[alg].driver_name);
1491                rc = crypto_register_skcipher(&t_alg->skcipher_alg);
1492                dev_dbg(dev, "%s alg registration rc = %x\n",
1493                        t_alg->skcipher_alg.base.cra_driver_name, rc);
1494                if (rc) {
1495                        dev_err(dev, "%s alg registration failed\n",
1496                                t_alg->skcipher_alg.base.cra_driver_name);
1497                        goto fail0;
1498                }
1499
1500                list_add_tail(&t_alg->entry, &drvdata->alg_list);
1501                dev_dbg(dev, "Registered %s\n",
1502                        t_alg->skcipher_alg.base.cra_driver_name);
1503        }
1504        return 0;
1505
1506fail0:
1507        cc_cipher_free(drvdata);
1508        return rc;
1509}
1510
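/*
 * Usage sketch (illustrative only, not part of this driver): once
 * registered, these transforms are reachable through the generic skcipher
 * API, and the crypto core selects them by cra_priority (CC_CRA_PRIO):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cts(cbc(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	// ... crypto_skcipher_setkey(), skcipher_request_alloc(), etc. ...
 *	crypto_free_skcipher(tfm);
 */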