/* linux/drivers/staging/ccree/ssi_cipher.c */
   1/*
   2 * Copyright (C) 2012-2017 ARM Limited or its affiliates.
   3 *
   4 * This program is free software; you can redistribute it and/or modify
   5 * it under the terms of the GNU General Public License version 2 as
   6 * published by the Free Software Foundation.
   7 *
   8 * This program is distributed in the hope that it will be useful,
   9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
  10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  11 * GNU General Public License for more details.
  12 *
  13 * You should have received a copy of the GNU General Public License
  14 * along with this program; if not, see <http://www.gnu.org/licenses/>.
  15 */
  16
  17#include <linux/kernel.h>
  18#include <linux/module.h>
  19#include <linux/platform_device.h>
  20#include <linux/semaphore.h>
  21#include <crypto/algapi.h>
  22#include <crypto/internal/skcipher.h>
  23#include <crypto/aes.h>
  24#include <crypto/ctr.h>
  25#include <crypto/des.h>
  26
  27#include "ssi_config.h"
  28#include "ssi_driver.h"
  29#include "cc_lli_defs.h"
  30#include "ssi_buffer_mgr.h"
  31#include "ssi_cipher.h"
  32#include "ssi_request_mgr.h"
  33#include "ssi_sysfs.h"
  34#include "ssi_fips_local.h"
  35
  36#define MAX_ABLKCIPHER_SEQ_LEN 6
  37
  38#define template_ablkcipher     template_u.ablkcipher
  39
  40#define SSI_MIN_AES_XTS_SIZE 0x10
  41#define SSI_MAX_AES_XTS_SIZE 0x2000
/* Per-driver handle: list head of the blkcipher algorithms registered by this driver. */
struct ssi_blkcipher_handle {
	struct list_head blkcipher_alg_list;
};
  45
/* User-provided key material: kernel buffer plus its DMA mapping (see ssi_blkcipher_init). */
struct cc_user_key_info {
	u8 *key;			/* key buffer, kmalloc'ed with GFP_DMA */
	dma_addr_t key_dma_addr;	/* DMA address of @key, mapped TO_DEVICE */
};
  50
/* HW-protected key configuration: which engine key slots to use (set in setkey). */
struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;	/* slot for the (first) key */
	enum cc_hw_crypto_key key2_slot;	/* second slot for XTS/ESSIV/BITLOCKER */
};
  55
/* Per-tfm cipher context, allocated by the crypto API for each transform. */
struct ssi_ablkcipher_ctx {
	struct ssi_drvdata *drvdata;	/* back-pointer to driver data */
	int keylen;			/* current key length in bytes */
	int key_round_number;		/* MULTI2 only: round count (last key byte) */
	int cipher_mode;		/* DRV_CIPHER_* mode */
	int flow_mode;			/* S_DIN_to_* engine flow */
	unsigned int flags;
	struct blkcipher_req_ctx *sync_ctx;
	struct cc_user_key_info user;	/* software key buffer + DMA mapping */
	struct cc_hw_key_info hw;	/* HW key slots (when ssi_is_hw_key()) */
	struct crypto_shash *shash_tfm;	/* sha256 tfm, ESSIV mode only */
};
  68
  69static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
  70
  71static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, u32 size) {
  72        switch (ctx_p->flow_mode) {
  73        case S_DIN_to_AES:
  74                switch (size) {
  75                case CC_AES_128_BIT_KEY_SIZE:
  76                case CC_AES_192_BIT_KEY_SIZE:
  77                        if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
  78                                   (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
  79                                   (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
  80                                return 0;
  81                        break;
  82                case CC_AES_256_BIT_KEY_SIZE:
  83                        return 0;
  84                case (CC_AES_192_BIT_KEY_SIZE * 2):
  85                case (CC_AES_256_BIT_KEY_SIZE * 2):
  86                        if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
  87                                   (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
  88                                   (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
  89                                return 0;
  90                        break;
  91                default:
  92                        break;
  93                }
  94        case S_DIN_to_DES:
  95                if (likely(size == DES3_EDE_KEY_SIZE ||
  96                    size == DES_KEY_SIZE))
  97                        return 0;
  98                break;
  99#if SSI_CC_HAS_MULTI2
 100        case S_DIN_to_MULTI2:
 101                if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
 102                        return 0;
 103                break;
 104#endif
 105        default:
 106                break;
 107        }
 108        return -EINVAL;
 109}
 110
 111static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size) {
 112        switch (ctx_p->flow_mode) {
 113        case S_DIN_to_AES:
 114                switch (ctx_p->cipher_mode) {
 115                case DRV_CIPHER_XTS:
 116                        if ((size >= SSI_MIN_AES_XTS_SIZE) &&
 117                            (size <= SSI_MAX_AES_XTS_SIZE) &&
 118                            IS_ALIGNED(size, AES_BLOCK_SIZE))
 119                                return 0;
 120                        break;
 121                case DRV_CIPHER_CBC_CTS:
 122                        if (likely(size >= AES_BLOCK_SIZE))
 123                                return 0;
 124                        break;
 125                case DRV_CIPHER_OFB:
 126                case DRV_CIPHER_CTR:
 127                                return 0;
 128                case DRV_CIPHER_ECB:
 129                case DRV_CIPHER_CBC:
 130                case DRV_CIPHER_ESSIV:
 131                case DRV_CIPHER_BITLOCKER:
 132                        if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
 133                                return 0;
 134                        break;
 135                default:
 136                        break;
 137                }
 138                break;
 139        case S_DIN_to_DES:
 140                if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
 141                                return 0;
 142                break;
 143#if SSI_CC_HAS_MULTI2
 144        case S_DIN_to_MULTI2:
 145                switch (ctx_p->cipher_mode) {
 146                case DRV_MULTI2_CBC:
 147                        if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
 148                                return 0;
 149                        break;
 150                case DRV_MULTI2_OFB:
 151                        return 0;
 152                default:
 153                        break;
 154                }
 155                break;
 156#endif /*SSI_CC_HAS_MULTI2*/
 157        default:
 158                break;
 159        }
 160        return -EINVAL;
 161}
 162
 163static unsigned int get_max_keysize(struct crypto_tfm *tfm)
 164{
 165        struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
 166
 167        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER)
 168                return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
 169
 170        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER)
 171                return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
 172
 173        return 0;
 174}
 175
 176static int ssi_blkcipher_init(struct crypto_tfm *tfm)
 177{
 178        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 179        struct crypto_alg *alg = tfm->__crt_alg;
 180        struct ssi_crypto_alg *ssi_alg =
 181                        container_of(alg, struct ssi_crypto_alg, crypto_alg);
 182        struct device *dev;
 183        int rc = 0;
 184        unsigned int max_key_buf_size = get_max_keysize(tfm);
 185
 186        SSI_LOG_DEBUG("Initializing context @%p for %s\n", ctx_p,
 187                                                crypto_tfm_alg_name(tfm));
 188
 189        CHECK_AND_RETURN_UPON_FIPS_ERROR();
 190        ctx_p->cipher_mode = ssi_alg->cipher_mode;
 191        ctx_p->flow_mode = ssi_alg->flow_mode;
 192        ctx_p->drvdata = ssi_alg->drvdata;
 193        dev = &ctx_p->drvdata->plat_dev->dev;
 194
 195        /* Allocate key buffer, cache line aligned */
 196        ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL | GFP_DMA);
 197        if (!ctx_p->user.key) {
 198                SSI_LOG_ERR("Allocating key buffer in context failed\n");
 199                rc = -ENOMEM;
 200        }
 201        SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
 202                      ctx_p->user.key);
 203
 204        /* Map key buffer */
 205        ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
 206                                             max_key_buf_size, DMA_TO_DEVICE);
 207        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
 208                SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
 209                        max_key_buf_size, ctx_p->user.key);
 210                return -ENOMEM;
 211        }
 212        SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=0x%llX\n",
 213                max_key_buf_size, ctx_p->user.key,
 214                (unsigned long long)ctx_p->user.key_dma_addr);
 215
 216        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 217                /* Alloc hash tfm for essiv */
 218                ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
 219                if (IS_ERR(ctx_p->shash_tfm)) {
 220                        SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
 221                        return PTR_ERR(ctx_p->shash_tfm);
 222                }
 223        }
 224
 225        return rc;
 226}
 227
 228static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
 229{
 230        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 231        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
 232        unsigned int max_key_buf_size = get_max_keysize(tfm);
 233
 234        SSI_LOG_DEBUG("Clearing context @%p for %s\n",
 235                crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
 236
 237        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 238                /* Free hash tfm for essiv */
 239                crypto_free_shash(ctx_p->shash_tfm);
 240                ctx_p->shash_tfm = NULL;
 241        }
 242
 243        /* Unmap key buffer */
 244        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
 245                                                                DMA_TO_DEVICE);
 246        SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=0x%llX\n",
 247                (unsigned long long)ctx_p->user.key_dma_addr);
 248
 249        /* Free key buffer in context */
 250        kfree(ctx_p->user.key);
 251        SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
 252}
 253
/* Overlay for a 3DES key blob: three consecutive single-DES keys (K1|K2|K3). */
struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};
 259
/*
 * 32-byte all-zero constant.
 * NOTE(review): not referenced anywhere in this portion of the file —
 * presumably used further down (or dead); confirm before removing.
 */
static const u8 zero_buff[] = { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
				0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
				0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
				0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
 264
 265/* The function verifies that tdes keys are not weak.*/
 266static int ssi_fips_verify_3des_keys(const u8 *key, unsigned int keylen)
 267{
 268#ifdef CCREE_FIPS_SUPPORT
 269        struct tdes_keys *tdes_key = (struct tdes_keys *)key;
 270
 271        /* verify key1 != key2 and key3 != key2*/
 272        if (unlikely((memcmp((u8 *)tdes_key->key1, (u8 *)tdes_key->key2, sizeof(tdes_key->key1)) == 0) ||
 273                      (memcmp((u8 *)tdes_key->key3, (u8 *)tdes_key->key2, sizeof(tdes_key->key3)) == 0))) {
 274                return -ENOEXEC;
 275        }
 276#endif /* CCREE_FIPS_SUPPORT */
 277
 278        return 0;
 279}
 280
 281/* The function verifies that xts keys are not weak.*/
 282static int ssi_fips_verify_xts_keys(const u8 *key, unsigned int keylen)
 283{
 284#ifdef CCREE_FIPS_SUPPORT
 285        /* Weak key is define as key that its first half (128/256 lsb) equals its second half (128/256 msb) */
 286        int singleKeySize = keylen >> 1;
 287
 288        if (unlikely(memcmp(key, &key[singleKeySize], singleKeySize) == 0))
 289                return -ENOEXEC;
 290#endif /* CCREE_FIPS_SUPPORT */
 291
 292        return 0;
 293}
 294
 295static enum cc_hw_crypto_key hw_key_to_cc_hw_key(int slot_num)
 296{
 297        switch (slot_num) {
 298        case 0:
 299                return KFDE0_KEY;
 300        case 1:
 301                return KFDE1_KEY;
 302        case 2:
 303                return KFDE2_KEY;
 304        case 3:
 305                return KFDE3_KEY;
 306        }
 307        return END_OF_KEYS;
 308}
 309
 310static int ssi_blkcipher_setkey(struct crypto_tfm *tfm,
 311                                const u8 *key,
 312                                unsigned int keylen)
 313{
 314        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 315        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
 316        u32 tmp[DES_EXPKEY_WORDS];
 317        unsigned int max_key_buf_size = get_max_keysize(tfm);
 318
 319        SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
 320                ctx_p, crypto_tfm_alg_name(tfm), keylen);
 321        dump_byte_array("key", (u8 *)key, keylen);
 322
 323        CHECK_AND_RETURN_UPON_FIPS_ERROR();
 324
 325        SSI_LOG_DEBUG("ssi_blkcipher_setkey: after FIPS check");
 326
 327        /* STAT_PHASE_0: Init and sanity checks */
 328
 329#if SSI_CC_HAS_MULTI2
 330        /*last byte of key buffer is round number and should not be a part of key size*/
 331        if (ctx_p->flow_mode == S_DIN_to_MULTI2)
 332                keylen -= 1;
 333#endif /*SSI_CC_HAS_MULTI2*/
 334
 335        if (unlikely(validate_keys_sizes(ctx_p, keylen) != 0)) {
 336                SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
 337                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 338                return -EINVAL;
 339        }
 340
 341        if (ssi_is_hw_key(tfm)) {
 342                /* setting HW key slots */
 343                struct arm_hw_key_info *hki = (struct arm_hw_key_info *)key;
 344
 345                if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
 346                        SSI_LOG_ERR("HW key not supported for non-AES flows\n");
 347                        return -EINVAL;
 348                }
 349
 350                ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
 351                if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
 352                        SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
 353                        return -EINVAL;
 354                }
 355
 356                if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
 357                    (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
 358                    (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
 359                        if (unlikely(hki->hw_key1 == hki->hw_key2)) {
 360                                SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
 361                                return -EINVAL;
 362                        }
 363                        ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
 364                        if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
 365                                SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
 366                                return -EINVAL;
 367                        }
 368                }
 369
 370                ctx_p->keylen = keylen;
 371                SSI_LOG_DEBUG("ssi_blkcipher_setkey: ssi_is_hw_key ret 0");
 372
 373                return 0;
 374        }
 375
 376        // verify weak keys
 377        if (ctx_p->flow_mode == S_DIN_to_DES) {
 378                if (unlikely(!des_ekey(tmp, key)) &&
 379                    (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
 380                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
 381                        SSI_LOG_DEBUG("ssi_blkcipher_setkey:  weak DES key");
 382                        return -EINVAL;
 383                }
 384        }
 385        if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) &&
 386            ssi_fips_verify_xts_keys(key, keylen) != 0) {
 387                SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak XTS key");
 388                return -EINVAL;
 389        }
 390        if ((ctx_p->flow_mode == S_DIN_to_DES) &&
 391            (keylen == DES3_EDE_KEY_SIZE) &&
 392            ssi_fips_verify_3des_keys(key, keylen) != 0) {
 393                SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak 3DES key");
 394                return -EINVAL;
 395        }
 396
 397        /* STAT_PHASE_1: Copy key to ctx */
 398        dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
 399                                        max_key_buf_size, DMA_TO_DEVICE);
 400
 401        if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
 402#if SSI_CC_HAS_MULTI2
 403                memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
 404                ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
 405                if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
 406                    ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
 407                        crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 408                        SSI_LOG_DEBUG("ssi_blkcipher_setkey: SSI_CC_HAS_MULTI2 einval");
 409                        return -EINVAL;
 410#endif /*SSI_CC_HAS_MULTI2*/
 411        } else {
 412                memcpy(ctx_p->user.key, key, keylen);
 413                if (keylen == 24)
 414                        memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
 415
 416                if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 417                        /* sha256 for key2 - use sw implementation */
 418                        int key_len = keylen >> 1;
 419                        int err;
 420                        SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
 421
 422                        desc->tfm = ctx_p->shash_tfm;
 423
 424                        err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
 425                        if (err) {
 426                                SSI_LOG_ERR("Failed to hash ESSIV key.\n");
 427                                return err;
 428                        }
 429                }
 430        }
 431        dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
 432                                        max_key_buf_size, DMA_TO_DEVICE);
 433        ctx_p->keylen = keylen;
 434
 435         SSI_LOG_DEBUG("ssi_blkcipher_setkey: return safely");
 436        return 0;
 437}
 438
/*
 * Build the "setup" part of the HW descriptor sequence for one cipher
 * request: load the IV/state and the key(s) into the engine according to
 * @ctx_p->cipher_mode.  Descriptors are appended to @desc and *@seq_size
 * is advanced; descriptor order is significant and must not be changed.
 */
static inline void
ssi_blkcipher_create_setup_desc(
	struct crypto_tfm *tfm,
	struct blkcipher_req_ctx *req_ctx,
	unsigned int ivsize,
	unsigned int nbytes,
	struct cc_hw_desc desc[],
	unsigned int *seq_size)
{
	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);

	/* XTS data-unit size may be fixed by the algorithm's bulk flags. */
	if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
		du_size = 512;
	if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
		du_size = 4096;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		/* CTR/OFB keep the counter/feedback in STATE1, CBC in STATE0 */
		if ((cipher_mode == DRV_CIPHER_CTR) ||
		    (cipher_mode == DRV_CIPHER_OFB)) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (ssi_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				/* 192-bit keys are stored zero-padded to max size */
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Two-key modes: key buffer holds both halves back to back. */
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (ssi_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (ssi_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		/* setkey/validate should have rejected anything else. */
		SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
		BUG();
	}
}
 563
#if SSI_CC_HAS_MULTI2
/*
 * MULTI2 variant of the setup-descriptor builder: load the system key,
 * the data key (with the configured round count), and then the IV/state.
 * Appends three descriptors to @desc and advances *@seq_size.
 */
static inline void ssi_blkcipher_create_multi2_setup_desc(
	struct crypto_tfm *tfm,
	struct blkcipher_req_ctx *req_ctx,
	unsigned int ivsize,
	struct cc_hw_desc desc[],
	unsigned int *seq_size)
{
	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	int direction = req_ctx->gen_ctx.op_type;
	/* Load system key */
	hw_desc_init(&desc[*seq_size]);
	set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
	set_cipher_config0(&desc[*seq_size], direction);
	set_din_type(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
		     CC_MULTI2_SYSTEM_KEY_SIZE, NS_BIT);
	set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
	set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
	(*seq_size)++;

	/* load data key (stored right after the system key in the buffer) */
	hw_desc_init(&desc[*seq_size]);
	set_din_type(&desc[*seq_size], DMA_DLLI,
		     (ctx_p->user.key_dma_addr + CC_MULTI2_SYSTEM_KEY_SIZE),
		     CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
	set_multi2_num_rounds(&desc[*seq_size], ctx_p->key_round_number);
	set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
	set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
	set_cipher_config0(&desc[*seq_size], direction);
	set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
	(*seq_size)++;

	/* Set state */
	hw_desc_init(&desc[*seq_size]);
	set_din_type(&desc[*seq_size], DMA_DLLI, req_ctx->gen_ctx.iv_dma_addr,
		     ivsize, NS_BIT);
	set_cipher_config0(&desc[*seq_size], direction);
	set_flow_mode(&desc[*seq_size], ctx_p->flow_mode);
	set_cipher_mode(&desc[*seq_size], ctx_p->cipher_mode);
	set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
	(*seq_size)++;
}
#endif /*SSI_CC_HAS_MULTI2*/
 608
/*
 * Build the "data" part of the HW descriptor sequence: move @nbytes from
 * @src to @dst through the engine.  For DLLI (single contiguous mapping)
 * one descriptor suffices; for MLLI the table is first copied into SRAM
 * via a BYPASS descriptor, then referenced by the processing descriptor.
 * @areq non-NULL marks an async request (interrupt raised on completion).
 */
static inline void
ssi_blkcipher_create_data_desc(
	struct crypto_tfm *tfm,
	struct blkcipher_req_ctx *req_ctx,
	struct scatterlist *dst, struct scatterlist *src,
	unsigned int nbytes,
	void *areq,
	struct cc_hw_desc desc[],
	unsigned int *seq_size)
{
	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int flow_mode = ctx_p->flow_mode;

	/* Translate the setup flow mode to the matching DIN->DOUT flow. */
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
#if SSI_CC_HAS_MULTI2
	case S_DIN_to_MULTI2:
		flow_mode = DIN_MULTI2_DOUT;
		break;
#endif /*SSI_CC_HAS_MULTI2*/
	default:
		SSI_LOG_ERR("invalid flow mode, flow_mode = %d \n", flow_mode);
		return;
	}
	/* Process */
	if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)) {
		SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
			     (unsigned long long)sg_dma_address(src),
			     nbytes);
		SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
			     (unsigned long long)sg_dma_address(dst),
			     nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(&desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass: first copy the MLLI table into SRAM */
		SSI_LOG_DEBUG(" bypass params addr 0x%llX "
			     "length 0x%X addr 0x%08X\n",
			(unsigned long long)req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		/* then process via the in-SRAM MLLI table */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		/* out_nents == 0 means in-place: dout reuses the input table */
		if (req_ctx->out_nents == 0) {
			SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
				     "addr 0x%08X\n",
			(unsigned int)ctx_p->drvdata->mlli_sram_addr,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			SSI_LOG_DEBUG(" din/dout params "
				     "addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE *
							req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(&desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}
 707
 708static int ssi_blkcipher_complete(struct device *dev,
 709                                struct ssi_ablkcipher_ctx *ctx_p,
 710                                struct blkcipher_req_ctx *req_ctx,
 711                                struct scatterlist *dst,
 712                                struct scatterlist *src,
 713                                unsigned int ivsize,
 714                                void *areq,
 715                                void __iomem *cc_base)
 716{
 717        int completion_error = 0;
 718        u32 inflight_counter;
 719
 720        ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
 721
 722        /*Set the inflight couter value to local variable*/
 723        inflight_counter =  ctx_p->drvdata->inflight_counter;
 724        /*Decrease the inflight counter*/
 725        if (ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
 726                ctx_p->drvdata->inflight_counter--;
 727
 728        if (areq) {
 729                ablkcipher_request_complete(areq, completion_error);
 730                return 0;
 731        }
 732        return completion_error;
 733}
 734
/*
 * ssi_blkcipher_process() - Core request path shared by encrypt and decrypt.
 * @tfm:       the transform this request runs on
 * @req_ctx:   per-request context (op type, mapped buffer state, giv flag)
 * @dst:       destination scatterlist
 * @src:       source scatterlist
 * @nbytes:    number of bytes to process
 * @info:      request IV buffer (req->info for async requests)
 * @ivsize:    IV size in bytes for this transform
 * @areq:      async request pointer, or NULL for a synchronous invocation
 * @direction: DRV_CRYPTO_DIRECTION_ENCRYPT or DRV_CRYPTO_DIRECTION_DECRYPT
 *
 * Validates the data size, maps the request buffers for DMA, builds the
 * HW descriptor sequence (setup + data descriptors) and pushes it to the
 * request queue.  When @areq is non-NULL, -EINPROGRESS indicates the
 * request was queued and will complete via ssi_ablkcipher_complete();
 * otherwise the request is run to completion synchronously before return.
 */
static int ssi_blkcipher_process(
	struct crypto_tfm *tfm,
	struct blkcipher_req_ctx *req_ctx,
	struct scatterlist *dst, struct scatterlist *src,
	unsigned int nbytes,
	void *info, //req info
	unsigned int ivsize,
	void *areq,
	enum drv_crypto_direction direction)
{
	struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = &ctx_p->drvdata->plat_dev->dev;
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct ssi_crypto_req ssi_req = {};
	int rc, seq_len = 0, cts_restore_flag = 0;

	SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ? "Encrypt" : "Decrypt"),
		     areq, info, nbytes);

	CHECK_AND_RETURN_UPON_FIPS_ERROR();
	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (unlikely(validate_data_size(ctx_p, nbytes))) {
		SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		return -EINVAL;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		return 0;
	}
	/*For CTS in case of data size aligned to 16 use CBC mode*/
	/* The mode switch is undone at exit_process via cts_restore_flag. */
	if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)) {
		ctx_p->cipher_mode = DRV_CIPHER_CBC;
		cts_restore_flag = 1;
	}

	/* Setup DX request structure */
	ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
	ssi_req.user_arg = (void *)areq;

#ifdef ENABLE_CYCLE_COUNT
	ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
		STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;

#endif

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
	if (unlikely(rc != 0)) {
		SSI_LOG_ERR("map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
#if SSI_CC_HAS_MULTI2
	if (ctx_p->flow_mode == S_DIN_to_MULTI2)
		ssi_blkcipher_create_multi2_setup_desc(tfm, req_ctx, ivsize,
						       desc, &seq_len);
	else
#endif /*SSI_CC_HAS_MULTI2*/
		ssi_blkcipher_create_setup_desc(tfm, req_ctx, ivsize, nbytes,
						desc, &seq_len);
	/* Data processing */
	ssi_blkcipher_create_data_desc(tfm,
			      req_ctx,
			      dst, src,
			      nbytes,
			      areq,
			      desc, &seq_len);

	/* do we need to generate IV? */
	if (req_ctx->is_giv) {
		ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
		ssi_req.ivgen_dma_addr_len = 1;
		/* set the IV size (8/16 B long)*/
		ssi_req.ivgen_size = ivsize;
	}

	/* STAT_PHASE_3: Lock HW and push sequence */

	/* Async (areq != NULL) expects -EINPROGRESS; sync waits inline. */
	rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (!areq) ? 0 : 1);
	if (areq) {
		if (unlikely(rc != -EINPROGRESS)) {
			/* Failed to send the request or request completed synchronously */
			ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
		}

	} else {
		if (rc != 0) {
			ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
		} else {
			rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst,
						    src, ivsize, NULL,
						    ctx_p->drvdata->cc_base);
		}
	}

exit_process:
	if (cts_restore_flag != 0)
		ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;

	return rc;
}
 847
 848static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
 849{
 850        struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
 851        struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
 852        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
 853        struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
 854        unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
 855
 856        CHECK_AND_RETURN_VOID_UPON_FIPS_ERROR();
 857
 858        ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src,
 859                               ivsize, areq, cc_base);
 860}
 861
 862/* Async wrap functions */
 863
 864static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
 865{
 866        struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
 867
 868        ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
 869
 870        return ssi_blkcipher_init(tfm);
 871}
 872
 873static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
 874                                const u8 *key,
 875                                unsigned int keylen)
 876{
 877        return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
 878}
 879
 880static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
 881{
 882        struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
 883        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
 884        struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
 885        unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
 886
 887        req_ctx->backup_info = req->info;
 888        req_ctx->is_giv = false;
 889
 890        return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
 891}
 892
 893static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
 894{
 895        struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
 896        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
 897        struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
 898        unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
 899
 900        req_ctx->backup_info = req->info;
 901        req_ctx->is_giv = false;
 902        return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
 903}
 904
 905/* DX Block cipher alg */
/*
 * DX Block cipher alg
 *
 * Template table of every cipher algorithm this driver can register.
 * Each entry is turned into a crypto_alg by ssi_ablkcipher_create_alg()
 * and registered by ssi_ablkcipher_alloc().  The du512/du4096 variants
 * differ only in the CRYPTO_ALG_BULK_DU_* flag OR'd into .type.
 */
static struct ssi_alg_template blkcipher_algs[] = {
/* Async template */
#if SSI_CC_HAS_AES_XTS
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			/* XTS keys carry two AES keys, hence the * 2 */
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			/* NOTE(review): only this XTS variant sets .geniv;
			 * the du512/du4096 entries below do not — confirm
			 * whether that asymmetry is intentional.
			 */
			.geniv = "eseqiv",
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-du512-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-du4096-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
	},
#endif /*SSI_CC_HAS_AES_XTS*/
#if SSI_CC_HAS_AES_ESSIV
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-du512-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-du4096-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
	},
#endif /*SSI_CC_HAS_AES_ESSIV*/
#if SSI_CC_HAS_AES_BITLOCKER
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-du512-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-du4096-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
	},
#endif /*SSI_CC_HAS_AES_BITLOCKER*/
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
	},
#if SSI_CC_HAS_AES_CTS
	{
		.name = "cts1(cbc(aes))",
		.driver_name = "cts1-cbc-aes-dx",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
	},
#endif
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-dx",
		/* CTR acts as a stream cipher, hence blocksize 1 */
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-dx",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-dx",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-dx",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-dx",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
	},
#if SSI_CC_HAS_MULTI2
	{
		.name = "cbc(multi2)",
		.driver_name = "cbc-multi2-dx",
		.blocksize = CC_MULTI2_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			.decrypt = ssi_ablkcipher_decrypt,
			/* +1 byte carries the MULTI2 round count */
			.min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.ivsize = CC_MULTI2_IV_SIZE,
			},
		.cipher_mode = DRV_MULTI2_CBC,
		.flow_mode = S_DIN_to_MULTI2,
	},
	{
		.name = "ofb(multi2)",
		.driver_name = "ofb-multi2-dx",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_ablkcipher = {
			.setkey = ssi_ablkcipher_setkey,
			.encrypt = ssi_ablkcipher_encrypt,
			/* OFB is symmetric: decrypt reuses the encrypt path */
			.decrypt = ssi_ablkcipher_encrypt,
			.min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
			.ivsize = CC_MULTI2_IV_SIZE,
			},
		.cipher_mode = DRV_MULTI2_OFB,
		.flow_mode = S_DIN_to_MULTI2,
	},
#endif /*SSI_CC_HAS_MULTI2*/
};
1240
1241static
1242struct ssi_crypto_alg *ssi_ablkcipher_create_alg(struct ssi_alg_template *template)
1243{
1244        struct ssi_crypto_alg *t_alg;
1245        struct crypto_alg *alg;
1246
1247        t_alg = kzalloc(sizeof(struct ssi_crypto_alg), GFP_KERNEL);
1248        if (!t_alg) {
1249                SSI_LOG_ERR("failed to allocate t_alg\n");
1250                return ERR_PTR(-ENOMEM);
1251        }
1252
1253        alg = &t_alg->crypto_alg;
1254
1255        snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1256        snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1257                 template->driver_name);
1258        alg->cra_module = THIS_MODULE;
1259        alg->cra_priority = SSI_CRA_PRIO;
1260        alg->cra_blocksize = template->blocksize;
1261        alg->cra_alignmask = 0;
1262        alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1263
1264        alg->cra_init = ssi_ablkcipher_init;
1265        alg->cra_exit = ssi_blkcipher_exit;
1266        alg->cra_type = &crypto_ablkcipher_type;
1267        alg->cra_ablkcipher = template->template_ablkcipher;
1268        alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
1269                                template->type;
1270
1271        t_alg->cipher_mode = template->cipher_mode;
1272        t_alg->flow_mode = template->flow_mode;
1273
1274        return t_alg;
1275}
1276
1277int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1278{
1279        struct ssi_crypto_alg *t_alg, *n;
1280        struct ssi_blkcipher_handle *blkcipher_handle =
1281                                                drvdata->blkcipher_handle;
1282        struct device *dev;
1283
1284        dev = &drvdata->plat_dev->dev;
1285
1286        if (blkcipher_handle) {
1287                /* Remove registered algs */
1288                list_for_each_entry_safe(t_alg, n,
1289                                &blkcipher_handle->blkcipher_alg_list,
1290                                         entry) {
1291                        crypto_unregister_alg(&t_alg->crypto_alg);
1292                        list_del(&t_alg->entry);
1293                        kfree(t_alg);
1294                }
1295                kfree(blkcipher_handle);
1296                drvdata->blkcipher_handle = NULL;
1297        }
1298        return 0;
1299}
1300
1301int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1302{
1303        struct ssi_blkcipher_handle *ablkcipher_handle;
1304        struct ssi_crypto_alg *t_alg;
1305        int rc = -ENOMEM;
1306        int alg;
1307
1308        ablkcipher_handle = kmalloc(sizeof(struct ssi_blkcipher_handle),
1309                GFP_KERNEL);
1310        if (!ablkcipher_handle)
1311                return -ENOMEM;
1312
1313        drvdata->blkcipher_handle = ablkcipher_handle;
1314
1315        INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1316
1317        /* Linux crypto */
1318        SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1319        for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1320                SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1321                t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1322                if (IS_ERR(t_alg)) {
1323                        rc = PTR_ERR(t_alg);
1324                        SSI_LOG_ERR("%s alg allocation failed\n",
1325                                 blkcipher_algs[alg].driver_name);
1326                        goto fail0;
1327                }
1328                t_alg->drvdata = drvdata;
1329
1330                SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1331                rc = crypto_register_alg(&t_alg->crypto_alg);
1332                SSI_LOG_DEBUG("%s alg registration rc = %x\n",
1333                        t_alg->crypto_alg.cra_driver_name, rc);
1334                if (unlikely(rc != 0)) {
1335                        SSI_LOG_ERR("%s alg registration failed\n",
1336                                t_alg->crypto_alg.cra_driver_name);
1337                        kfree(t_alg);
1338                        goto fail0;
1339                } else {
1340                        list_add_tail(&t_alg->entry,
1341                                      &ablkcipher_handle->blkcipher_alg_list);
1342                        SSI_LOG_DEBUG("Registered %s\n",
1343                                        t_alg->crypto_alg.cra_driver_name);
1344                }
1345        }
1346        return 0;
1347
1348fail0:
1349        ssi_ablkcipher_free(drvdata);
1350        return rc;
1351}
1352