linux/drivers/staging/ccree/ssi_cipher.c
   1/*
   2 * Copyright (C) 2012-2017 ARM Limited or its affiliates.
   3 * 
   4 * This program is free software; you can redistribute it and/or modify
   5 * it under the terms of the GNU General Public License version 2 as
   6 * published by the Free Software Foundation.
   7 * 
   8 * This program is distributed in the hope that it will be useful,
   9 * but WITHOUT ANY WARRANTY; without even the implied warranty of
  10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  11 * GNU General Public License for more details.
  12 * 
  13 * You should have received a copy of the GNU General Public License
  14 * along with this program; if not, see <http://www.gnu.org/licenses/>.
  15 */
  16
  17#include <linux/kernel.h>
  18#include <linux/module.h>
  19#include <linux/platform_device.h>
  20#include <linux/semaphore.h>
  21#include <crypto/algapi.h>
  22#include <crypto/internal/skcipher.h>
  23#include <crypto/aes.h>
  24#include <crypto/ctr.h>
  25#include <crypto/des.h>
  26
  27#include "ssi_config.h"
  28#include "ssi_driver.h"
  29#include "cc_lli_defs.h"
  30#include "ssi_buffer_mgr.h"
  31#include "ssi_cipher.h"
  32#include "ssi_request_mgr.h"
  33#include "ssi_sysfs.h"
  34#include "ssi_fips_local.h"
  35
  36#define MAX_ABLKCIPHER_SEQ_LEN 6
  37
  38#define template_ablkcipher     template_u.ablkcipher
  39#define template_sblkcipher     template_u.blkcipher
  40
  41#define SSI_MIN_AES_XTS_SIZE 0x10
  42#define SSI_MAX_AES_XTS_SIZE 0x2000
  43struct ssi_blkcipher_handle {
  44        struct list_head blkcipher_alg_list;
  45};
  46
  47struct cc_user_key_info {
  48        uint8_t *key;
  49        dma_addr_t key_dma_addr;
  50};
  51struct cc_hw_key_info {
  52        enum HwCryptoKey key1_slot;
  53        enum HwCryptoKey key2_slot;
  54};
  55
  56struct ssi_ablkcipher_ctx {
  57        struct ssi_drvdata *drvdata;
  58        int keylen;
  59        int key_round_number;
  60        int cipher_mode;
  61        int flow_mode;
  62        unsigned int flags;
  63        struct blkcipher_req_ctx *sync_ctx;
  64        struct cc_user_key_info user;
  65        struct cc_hw_key_info hw;
  66        struct crypto_shash *shash_tfm;
  67};
  68
  69static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base);
  70
  71
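     /*
      * Check that @size (the key length in bytes) is valid for the tfm's
      * flow mode and cipher mode: XTS/ESSIV/BITLOCKER expect a double-length
      * AES key, the remaining AES modes take 128/192/256-bit keys, and the
      * DES flow takes single- or triple-DES key sizes.
      * Returns 0 if supported, -EINVAL otherwise.
      */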
  72static int validate_keys_sizes(struct ssi_ablkcipher_ctx *ctx_p, uint32_t size) {
  73        switch (ctx_p->flow_mode){
  74        case S_DIN_to_AES:
  75                switch (size){
  76                case CC_AES_128_BIT_KEY_SIZE:
  77                case CC_AES_192_BIT_KEY_SIZE:
  78                        if (likely((ctx_p->cipher_mode != DRV_CIPHER_XTS) &&
  79                                   (ctx_p->cipher_mode != DRV_CIPHER_ESSIV) &&
  80                                   (ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)))
  81                                return 0;
  82                        break;
  83                case CC_AES_256_BIT_KEY_SIZE:
  84                        return 0;
  85                case (CC_AES_192_BIT_KEY_SIZE*2):
  86                case (CC_AES_256_BIT_KEY_SIZE*2):
  87                        if (likely((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
  88                                   (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
  89                                   (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)))
  90                                return 0;
  91                        break;
  92                default:
  93                        break;
   94                }
                     break; /* AES flow handled above; do not fall through into the DES checks */
  95        case S_DIN_to_DES:
  96                if (likely(size == DES3_EDE_KEY_SIZE ||
  97                    size == DES_KEY_SIZE))
  98                        return 0;
  99                break;
 100#if SSI_CC_HAS_MULTI2
 101        case S_DIN_to_MULTI2:
 102                if (likely(size == CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE))
 103                        return 0;
 104                break;
 105#endif
 106        default:
 107                break;
 108
 109        }
 110        return -EINVAL;
 111}
 112
 113
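     /*
      * Check that the request length is acceptable for the configured mode:
      * CTR/OFB take any size, ECB/CBC/ESSIV/BITLOCKER need block-aligned
      * data, XTS is additionally bounded by SSI_MIN/MAX_AES_XTS_SIZE,
      * CBC-CTS needs at least one block and DES data must be aligned to the
      * DES block size.
      */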
 114static int validate_data_size(struct ssi_ablkcipher_ctx *ctx_p, unsigned int size) {
 115        switch (ctx_p->flow_mode){
 116        case S_DIN_to_AES:
 117                switch (ctx_p->cipher_mode){
 118                case DRV_CIPHER_XTS:
 119                        if ((size >= SSI_MIN_AES_XTS_SIZE) &&
 120                            (size <= SSI_MAX_AES_XTS_SIZE) && 
 121                            IS_ALIGNED(size, AES_BLOCK_SIZE))
 122                                return 0;
 123                        break;
 124                case DRV_CIPHER_CBC_CTS:
 125                        if (likely(size >= AES_BLOCK_SIZE))
 126                                return 0;
 127                        break;
 128                case DRV_CIPHER_OFB:
 129                case DRV_CIPHER_CTR:
  130                return 0;
 131                case DRV_CIPHER_ECB:
 132                case DRV_CIPHER_CBC:
 133                case DRV_CIPHER_ESSIV:
 134                case DRV_CIPHER_BITLOCKER:
 135                        if (likely(IS_ALIGNED(size, AES_BLOCK_SIZE)))
 136                                return 0;
 137                        break;
 138                default:
 139                        break;
 140                }
 141                break;
 142        case S_DIN_to_DES:
 143                if (likely(IS_ALIGNED(size, DES_BLOCK_SIZE)))
  144                        return 0;
 145                break;
 146#if SSI_CC_HAS_MULTI2
 147        case S_DIN_to_MULTI2:
 148                switch (ctx_p->cipher_mode) {
 149                case DRV_MULTI2_CBC:
 150                        if (likely(IS_ALIGNED(size, CC_MULTI2_BLOCK_SIZE)))
 151                                return 0;
 152                        break;
 153                case DRV_MULTI2_OFB:
 154                        return 0;
 155                default:
 156                        break;
 157                }
 158                break;
 159#endif /*SSI_CC_HAS_MULTI2*/
 160        default:
 161                break;
 162
 163        }
 164        return -EINVAL;
 165}
 166
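     /*
      * Return the maximum key size advertised by the underlying
      * (a)blkcipher algorithm; used to size and DMA-map the per-tfm key
      * buffer.
      */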
 167static unsigned int get_max_keysize(struct crypto_tfm *tfm)
 168{
 169        struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
 170
 171        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_ABLKCIPHER) {
 172                return ssi_alg->crypto_alg.cra_ablkcipher.max_keysize;
 173        }
 174
 175        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_BLKCIPHER) {
 176                return ssi_alg->crypto_alg.cra_blkcipher.max_keysize;
 177        }
 178
 179        return 0;
 180}
 181
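     /*
      * Common tfm init: allocate and DMA-map a key buffer sized for the
      * algorithm's maximum key, and allocate a software SHA-256 transform
      * when the mode is ESSIV (used in setkey() to derive the second key
      * half).
      */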
 182static int ssi_blkcipher_init(struct crypto_tfm *tfm)
 183{
 184        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 185        struct crypto_alg *alg = tfm->__crt_alg;
 186        struct ssi_crypto_alg *ssi_alg =
 187                        container_of(alg, struct ssi_crypto_alg, crypto_alg);
 188        struct device *dev;
 189        int rc = 0;
 190        unsigned int max_key_buf_size = get_max_keysize(tfm);
 191
 192        SSI_LOG_DEBUG("Initializing context @%p for %s\n", ctx_p, 
 193                                                crypto_tfm_alg_name(tfm));
 194
 195        CHECK_AND_RETURN_UPON_FIPS_ERROR();
 196        ctx_p->cipher_mode = ssi_alg->cipher_mode;
 197        ctx_p->flow_mode = ssi_alg->flow_mode;
 198        ctx_p->drvdata = ssi_alg->drvdata;
 199        dev = &ctx_p->drvdata->plat_dev->dev;
 200
 201        /* Allocate key buffer, cache line aligned */
 202        ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL|GFP_DMA);
 203        if (!ctx_p->user.key) {
 204                SSI_LOG_ERR("Allocating key buffer in context failed\n");
  205                return -ENOMEM;
 206        }
 207        SSI_LOG_DEBUG("Allocated key buffer in context. key=@%p\n",
 208                      ctx_p->user.key);
 209
 210        /* Map key buffer */
 211        ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
 212                                             max_key_buf_size, DMA_TO_DEVICE);
 213        if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
 214                SSI_LOG_ERR("Mapping Key %u B at va=%pK for DMA failed\n",
 215                        max_key_buf_size, ctx_p->user.key);
 216                return -ENOMEM;
 217        }
 218        SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr, max_key_buf_size);
 219        SSI_LOG_DEBUG("Mapped key %u B at va=%pK to dma=0x%llX\n",
 220                max_key_buf_size, ctx_p->user.key,
 221                (unsigned long long)ctx_p->user.key_dma_addr);
 222
 223        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 224                /* Alloc hash tfm for essiv */
 225                ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
 226                if (IS_ERR(ctx_p->shash_tfm)) {
 227                        SSI_LOG_ERR("Error allocating hash tfm for ESSIV.\n");
 228                        return PTR_ERR(ctx_p->shash_tfm);
 229                }
 230        }
 231
 232        return rc;
 233}
 234
 235static void ssi_blkcipher_exit(struct crypto_tfm *tfm)
 236{
 237        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 238        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
 239        unsigned int max_key_buf_size = get_max_keysize(tfm);
 240
 241        SSI_LOG_DEBUG("Clearing context @%p for %s\n",
 242                crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));
 243
 244        if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 245                /* Free hash tfm for essiv */
 246                crypto_free_shash(ctx_p->shash_tfm);
 247                ctx_p->shash_tfm = NULL;
 248        }
 249
 250        /* Unmap key buffer */
 251        SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
 252        dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
 253                                                                DMA_TO_DEVICE);
 254        SSI_LOG_DEBUG("Unmapped key buffer key_dma_addr=0x%llX\n", 
 255                (unsigned long long)ctx_p->user.key_dma_addr);
 256
 257        /* Free key buffer in context */
 258        kfree(ctx_p->user.key);
 259        SSI_LOG_DEBUG("Free key buffer in context. key=@%p\n", ctx_p->user.key);
 260}
 261
 262
  263typedef struct tdes_keys {
 264        u8      key1[DES_KEY_SIZE];
 265        u8      key2[DES_KEY_SIZE];
 266        u8      key3[DES_KEY_SIZE];
  267} tdes_keys_t;
 268
 269static const u8 zero_buff[] = {0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
 270                               0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
 271                               0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
 272                               0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0};
 273
 274/* The function verifies that tdes keys are not weak.*/
 275static int ssi_fips_verify_3des_keys(const u8 *key, unsigned int keylen)
 276{
 277#ifdef CCREE_FIPS_SUPPORT
 278        tdes_keys_t *tdes_key = (tdes_keys_t*)key;
 279
 280        /* verify key1 != key2 and key3 != key2*/
 281        if (unlikely( (memcmp((u8*)tdes_key->key1, (u8*)tdes_key->key2, sizeof(tdes_key->key1)) == 0) || 
 282                      (memcmp((u8*)tdes_key->key3, (u8*)tdes_key->key2, sizeof(tdes_key->key3)) == 0) )) {
 283                return -ENOEXEC;
 284        }
 285#endif /* CCREE_FIPS_SUPPORT */
 286
 287        return 0;
 288}
 289
 290/* The function verifies that xts keys are not weak.*/
 291static int ssi_fips_verify_xts_keys(const u8 *key, unsigned int keylen)
 292{
 293#ifdef CCREE_FIPS_SUPPORT
  294        /* A weak key is defined as a key whose first half (128/256 LSB) equals its second half (128/256 MSB) */
 295        int singleKeySize = keylen >> 1;
 296
 297        if (unlikely(memcmp(key, &key[singleKeySize], singleKeySize) == 0)) {
 298                return -ENOEXEC;
 299        }
 300#endif /* CCREE_FIPS_SUPPORT */
 301
 302        return 0;
 303}
 304
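     /*
      * Translate a HW key slot number (0-3) into the corresponding KFDE key
      * enum; END_OF_KEYS marks an invalid slot.
      */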
 305static enum HwCryptoKey hw_key_to_cc_hw_key(int slot_num)
 306{
 307        switch (slot_num) {
 308        case 0:
 309                return KFDE0_KEY;
 310        case 1:
 311                return KFDE1_KEY;
 312        case 2:
 313                return KFDE2_KEY;
 314        case 3:
 315                return KFDE3_KEY;
 316        }
 317        return END_OF_KEYS;
 318}
 319
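     /*
      * Common setkey: validate the key size, handle HW (KFDE) key slots for
      * AES flows, reject weak DES keys (when CRYPTO_TFM_REQ_WEAK_KEY is set)
      * and, under FIPS, degenerate XTS/3DES keys, then copy the key into the
      * DMA-mapped key buffer. For ESSIV the second half of the buffer is
      * filled with the SHA-256 digest of the first half.
      */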
 320static int ssi_blkcipher_setkey(struct crypto_tfm *tfm, 
 321                                const u8 *key, 
 322                                unsigned int keylen)
 323{
 324        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 325        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
 326        u32 tmp[DES_EXPKEY_WORDS];
 327        unsigned int max_key_buf_size = get_max_keysize(tfm);
 328        DECL_CYCLE_COUNT_RESOURCES;
 329
 330        SSI_LOG_DEBUG("Setting key in context @%p for %s. keylen=%u\n",
 331                ctx_p, crypto_tfm_alg_name(tfm), keylen);
 332        dump_byte_array("key", (uint8_t *)key, keylen);
 333
 334        CHECK_AND_RETURN_UPON_FIPS_ERROR();
 335
 336        SSI_LOG_DEBUG("ssi_blkcipher_setkey: after FIPS check");
 337        
 338        /* STAT_PHASE_0: Init and sanity checks */
 339        START_CYCLE_COUNT();
 340
 341#if SSI_CC_HAS_MULTI2
  342        /* The last byte of the key buffer holds the round number and is not part of the key size */
 343        if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
 344                keylen -=1;
 345        }
 346#endif /*SSI_CC_HAS_MULTI2*/
 347
 348        if (unlikely(validate_keys_sizes(ctx_p,keylen) != 0)) {
 349                SSI_LOG_ERR("Unsupported key size %d.\n", keylen);
 350                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 351                return -EINVAL;
 352        }
 353
 354        if (ssi_is_hw_key(tfm)) {
 355                /* setting HW key slots */
 356                struct arm_hw_key_info *hki = (struct arm_hw_key_info*)key;
 357
 358                if (unlikely(ctx_p->flow_mode != S_DIN_to_AES)) {
 359                        SSI_LOG_ERR("HW key not supported for non-AES flows\n");
 360                        return -EINVAL;
 361                }
 362
 363                ctx_p->hw.key1_slot = hw_key_to_cc_hw_key(hki->hw_key1);
 364                if (unlikely(ctx_p->hw.key1_slot == END_OF_KEYS)) {
 365                        SSI_LOG_ERR("Unsupported hw key1 number (%d)\n", hki->hw_key1);
 366                        return -EINVAL;
 367                }
 368
 369                if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) ||
 370                    (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) ||
 371                    (ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)) {
 372                        if (unlikely(hki->hw_key1 == hki->hw_key2)) {
 373                                SSI_LOG_ERR("Illegal hw key numbers (%d,%d)\n", hki->hw_key1, hki->hw_key2);
 374                                return -EINVAL;
 375                        }
 376                        ctx_p->hw.key2_slot = hw_key_to_cc_hw_key(hki->hw_key2);
 377                        if (unlikely(ctx_p->hw.key2_slot == END_OF_KEYS)) {
 378                                SSI_LOG_ERR("Unsupported hw key2 number (%d)\n", hki->hw_key2);
 379                                return -EINVAL;
 380                        }
 381                }
 382
 383                ctx_p->keylen = keylen;
 384                END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
 385                SSI_LOG_DEBUG("ssi_blkcipher_setkey: ssi_is_hw_key ret 0");
 386
 387                return 0;
 388        }
 389
  390        /* verify weak keys */
 391        if (ctx_p->flow_mode == S_DIN_to_DES) {
 392                if (unlikely(!des_ekey(tmp, key)) &&
 393                    (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
 394                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
 395                        SSI_LOG_DEBUG("ssi_blkcipher_setkey:  weak DES key");
 396                        return -EINVAL;
 397                }
 398        }
 399        if ((ctx_p->cipher_mode == DRV_CIPHER_XTS) && 
 400            ssi_fips_verify_xts_keys(key, keylen) != 0) {
 401                SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak XTS key");
 402                return -EINVAL;
 403        }
 404        if ((ctx_p->flow_mode == S_DIN_to_DES) && 
 405            (keylen == DES3_EDE_KEY_SIZE) && 
 406            ssi_fips_verify_3des_keys(key, keylen) != 0) {
 407                SSI_LOG_DEBUG("ssi_blkcipher_setkey: weak 3DES key");
 408                return -EINVAL;
 409        }
 410
 411
 412        END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_0);
 413
 414        /* STAT_PHASE_1: Copy key to ctx */
 415        START_CYCLE_COUNT();
 416        SSI_RESTORE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr);
 417        dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, 
 418                                        max_key_buf_size, DMA_TO_DEVICE);
 419#if SSI_CC_HAS_MULTI2
 420        if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
 421                memcpy(ctx_p->user.key, key, CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE);
 422                ctx_p->key_round_number = key[CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE];
 423                if (ctx_p->key_round_number < CC_MULTI2_MIN_NUM_ROUNDS ||
 424                    ctx_p->key_round_number > CC_MULTI2_MAX_NUM_ROUNDS) {
 425                        crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 426                        SSI_LOG_DEBUG("ssi_blkcipher_setkey: SSI_CC_HAS_MULTI2 einval");
 427                        return -EINVAL;
 428                }
 429        } else 
 430#endif /*SSI_CC_HAS_MULTI2*/
 431        {
 432                memcpy(ctx_p->user.key, key, keylen);
 433                if (keylen == 24)
 434                        memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
 435
 436                if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
 437                        /* sha256 for key2 - use sw implementation */
 438                        int key_len = keylen >> 1;
 439                        int err;
 440                        SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
 441                        desc->tfm = ctx_p->shash_tfm;
 442
 443                        err = crypto_shash_digest(desc, ctx_p->user.key, key_len, ctx_p->user.key + key_len);
 444                        if (err) {
 445                                SSI_LOG_ERR("Failed to hash ESSIV key.\n");
 446                                return err;
 447                        }
 448                }
 449        }
 450        dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, 
 451                                        max_key_buf_size, DMA_TO_DEVICE);
  452        SSI_UPDATE_DMA_ADDR_TO_48BIT(ctx_p->user.key_dma_addr, max_key_buf_size);
 453        ctx_p->keylen = keylen;
 454        
 455        END_CYCLE_COUNT(STAT_OP_TYPE_SETKEY, STAT_PHASE_1);
 456
 457         SSI_LOG_DEBUG("ssi_blkcipher_setkey: return safely");
 458        return 0;
 459}
 460
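     /*
      * Build the setup descriptors that load the cipher state (IV) and
      * key(s) for the configured mode; XTS/ESSIV/BITLOCKER additionally load
      * the XEX/tweak key and the data-unit size.
      */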
 461static inline void
 462ssi_blkcipher_create_setup_desc(
 463        struct crypto_tfm *tfm,
 464        struct blkcipher_req_ctx *req_ctx,
 465        unsigned int ivsize,
 466        unsigned int nbytes,
 467        HwDesc_s desc[],
 468        unsigned int *seq_size)
 469{
 470        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 471        int cipher_mode = ctx_p->cipher_mode;
 472        int flow_mode = ctx_p->flow_mode;
 473        int direction = req_ctx->gen_ctx.op_type;
 474        dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
 475        unsigned int key_len = ctx_p->keylen;
 476        dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
 477        unsigned int du_size = nbytes;
 478
 479        struct ssi_crypto_alg *ssi_alg = container_of(tfm->__crt_alg, struct ssi_crypto_alg, crypto_alg);
 480
 481        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_512)
 482                du_size = 512;
 483        if ((ssi_alg->crypto_alg.cra_flags & CRYPTO_ALG_BULK_MASK) == CRYPTO_ALG_BULK_DU_4096)
 484                du_size = 4096;
 485
 486        switch (cipher_mode) {
 487        case DRV_CIPHER_CBC:
 488        case DRV_CIPHER_CBC_CTS:
 489        case DRV_CIPHER_CTR:
 490        case DRV_CIPHER_OFB:
 491                /* Load cipher state */
 492                HW_DESC_INIT(&desc[*seq_size]);
 493                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 494                                     iv_dma_addr, ivsize,
 495                                     NS_BIT);
 496                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 497                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
 498                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
 499                if ((cipher_mode == DRV_CIPHER_CTR) || 
 500                    (cipher_mode == DRV_CIPHER_OFB) ) {
 501                        HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
 502                                               SETUP_LOAD_STATE1);
 503                } else {
 504                        HW_DESC_SET_SETUP_MODE(&desc[*seq_size],
 505                                               SETUP_LOAD_STATE0);
 506                }
 507                (*seq_size)++;
 508                /*FALLTHROUGH*/
 509        case DRV_CIPHER_ECB:
 510                /* Load key */
 511                HW_DESC_INIT(&desc[*seq_size]);
 512                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
 513                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 514                if (flow_mode == S_DIN_to_AES) {
 515
 516                        if (ssi_is_hw_key(tfm)) {
 517                                HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
 518                        } else {
 519                                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 520                                                     key_dma_addr, 
 521                                                     ((key_len == 24) ? AES_MAX_KEY_SIZE : key_len),
 522                                                     NS_BIT);
 523                        }
 524                        HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len);
 525                } else {
 526                        /*des*/
 527                        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 528                                             key_dma_addr, key_len,
 529                                             NS_BIT);
 530                        HW_DESC_SET_KEY_SIZE_DES(&desc[*seq_size], key_len);
 531                }
 532                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
 533                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
 534                (*seq_size)++;
 535                break;
 536        case DRV_CIPHER_XTS:
 537        case DRV_CIPHER_ESSIV:
 538        case DRV_CIPHER_BITLOCKER:
 539                /* Load AES key */
 540                HW_DESC_INIT(&desc[*seq_size]);
 541                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
 542                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 543                if (ssi_is_hw_key(tfm)) {
 544                        HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key1_slot);
 545                } else {
 546                        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 547                                             key_dma_addr, key_len/2,
 548                                             NS_BIT);
 549                }
 550                HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
 551                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
 552                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
 553                (*seq_size)++;
 554
 555                /* load XEX key */
 556                HW_DESC_INIT(&desc[*seq_size]);
 557                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
 558                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 559                if (ssi_is_hw_key(tfm)) {
 560                        HW_DESC_SET_HW_CRYPTO_KEY(&desc[*seq_size], ctx_p->hw.key2_slot);
 561                } else {
 562                        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI, 
 563                                             (key_dma_addr+key_len/2), key_len/2,
 564                                             NS_BIT);
 565                }
 566                HW_DESC_SET_XEX_DATA_UNIT_SIZE(&desc[*seq_size], du_size);
 567                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], S_DIN_to_AES2);
 568                HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
 569                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
 570                (*seq_size)++;
 571        
 572                /* Set state */
 573                HW_DESC_INIT(&desc[*seq_size]);
 574                HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);
 575                HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], cipher_mode);
 576                HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 577                HW_DESC_SET_KEY_SIZE_AES(&desc[*seq_size], key_len/2);
 578                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
 579                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 580                                     iv_dma_addr, CC_AES_BLOCK_SIZE,
 581                                     NS_BIT);
 582                (*seq_size)++;
 583                break;
 584        default:
 585                SSI_LOG_ERR("Unsupported cipher mode (%d)\n", cipher_mode);
 586                BUG();
 587        }
 588}
 589
 590#if SSI_CC_HAS_MULTI2
 591static inline void ssi_blkcipher_create_multi2_setup_desc(
 592        struct crypto_tfm *tfm,
 593        struct blkcipher_req_ctx *req_ctx,
 594        unsigned int ivsize,
 595        HwDesc_s desc[],
 596        unsigned int *seq_size)
 597{
 598        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 599        
 600        int direction = req_ctx->gen_ctx.op_type;
 601        /* Load system key */
 602        HW_DESC_INIT(&desc[*seq_size]);
 603        HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
 604        HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 605        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI, ctx_p->user.key_dma_addr,
 606                                                CC_MULTI2_SYSTEM_KEY_SIZE,
 607                                                NS_BIT);
 608        HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
 609        HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_KEY0);
 610        (*seq_size)++;
 611
 612        /* load data key */
 613        HW_DESC_INIT(&desc[*seq_size]);
 614        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI, 
 615                                        (ctx_p->user.key_dma_addr + 
 616                                                CC_MULTI2_SYSTEM_KEY_SIZE),
 617                                CC_MULTI2_DATA_KEY_SIZE, NS_BIT);
 618        HW_DESC_SET_MULTI2_NUM_ROUNDS(&desc[*seq_size],
 619                                                ctx_p->key_round_number);
 620        HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
 621        HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
 622        HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 623        HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE0 );
 624        (*seq_size)++;
 625        
 626        
 627        /* Set state */
 628        HW_DESC_INIT(&desc[*seq_size]);
 629        HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 630                             req_ctx->gen_ctx.iv_dma_addr,
 631                             ivsize, NS_BIT);
 632        HW_DESC_SET_CIPHER_CONFIG0(&desc[*seq_size], direction);
 633        HW_DESC_SET_FLOW_MODE(&desc[*seq_size], ctx_p->flow_mode);
 634        HW_DESC_SET_CIPHER_MODE(&desc[*seq_size], ctx_p->cipher_mode);
 635        HW_DESC_SET_SETUP_MODE(&desc[*seq_size], SETUP_LOAD_STATE1);    
 636        (*seq_size)++;
 637        
 638}
 639#endif /*SSI_CC_HAS_MULTI2*/
 640
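     /*
      * Build the data-processing descriptors: a single DIN->DOUT descriptor
      * for direct (DLLI) buffers, or a BYPASS descriptor that copies the
      * MLLI table into SRAM followed by an MLLI-based processing descriptor.
      */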
 641static inline void
 642ssi_blkcipher_create_data_desc(
 643        struct crypto_tfm *tfm,
 644        struct blkcipher_req_ctx *req_ctx,
 645        struct scatterlist *dst, struct scatterlist *src,
 646        unsigned int nbytes,
 647        void *areq,
 648        HwDesc_s desc[],
 649        unsigned int *seq_size)
 650{
 651        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 652        unsigned int flow_mode = ctx_p->flow_mode;
 653
 654        switch (ctx_p->flow_mode) {
 655        case S_DIN_to_AES:
 656                flow_mode = DIN_AES_DOUT;
 657                break;
 658        case S_DIN_to_DES:
 659                flow_mode = DIN_DES_DOUT;
 660                break;
 661#if SSI_CC_HAS_MULTI2
 662        case S_DIN_to_MULTI2:
 663                flow_mode = DIN_MULTI2_DOUT;
 664                break;
 665#endif /*SSI_CC_HAS_MULTI2*/
 666        default:
 667                SSI_LOG_ERR("invalid flow mode, flow_mode = %d \n", flow_mode);
 668                return;
 669        }
 670        /* Process */
 671        if (likely(req_ctx->dma_buf_type == SSI_DMA_BUF_DLLI)){
 672                SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
 673                             (unsigned long long)sg_dma_address(src),
 674                             nbytes);
 675                SSI_LOG_DEBUG(" data params addr 0x%llX length 0x%X \n",
 676                             (unsigned long long)sg_dma_address(dst),
 677                             nbytes);
 678                HW_DESC_INIT(&desc[*seq_size]);
 679                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 680                                     sg_dma_address(src),
 681                                     nbytes, NS_BIT);
 682                HW_DESC_SET_DOUT_DLLI(&desc[*seq_size],
 683                                      sg_dma_address(dst),
 684                                      nbytes,
 685                                      NS_BIT, (areq == NULL)? 0:1);
 686                if (areq != NULL) {
 687                        HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
 688                }
 689                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
 690                (*seq_size)++;
 691        } else {
 692                /* bypass */
 693                SSI_LOG_DEBUG(" bypass params addr 0x%llX "
 694                             "length 0x%X addr 0x%08X\n",
 695                        (unsigned long long)req_ctx->mlli_params.mlli_dma_addr,
 696                        req_ctx->mlli_params.mlli_len,
 697                        (unsigned int)ctx_p->drvdata->mlli_sram_addr);
 698                HW_DESC_INIT(&desc[*seq_size]);
 699                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_DLLI,
 700                                     req_ctx->mlli_params.mlli_dma_addr,
 701                                     req_ctx->mlli_params.mlli_len,
 702                                     NS_BIT);
 703                HW_DESC_SET_DOUT_SRAM(&desc[*seq_size],
 704                                      ctx_p->drvdata->mlli_sram_addr,
 705                                      req_ctx->mlli_params.mlli_len);
 706                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], BYPASS);
 707                (*seq_size)++;
 708
 709                HW_DESC_INIT(&desc[*seq_size]);
 710                HW_DESC_SET_DIN_TYPE(&desc[*seq_size], DMA_MLLI,
 711                        ctx_p->drvdata->mlli_sram_addr,
 712                                     req_ctx->in_mlli_nents, NS_BIT);
 713                if (req_ctx->out_nents == 0) {
 714                        SSI_LOG_DEBUG(" din/dout params addr 0x%08X "
 715                                     "addr 0x%08X\n",
 716                        (unsigned int)ctx_p->drvdata->mlli_sram_addr,
 717                        (unsigned int)ctx_p->drvdata->mlli_sram_addr);
 718                        HW_DESC_SET_DOUT_MLLI(&desc[*seq_size], 
 719                        ctx_p->drvdata->mlli_sram_addr,
 720                                              req_ctx->in_mlli_nents,
 721                                              NS_BIT,(areq == NULL)? 0:1);
 722                } else {
 723                        SSI_LOG_DEBUG(" din/dout params "
 724                                     "addr 0x%08X addr 0x%08X\n",
 725                                (unsigned int)ctx_p->drvdata->mlli_sram_addr,
 726                                (unsigned int)ctx_p->drvdata->mlli_sram_addr + 
 727                                (uint32_t)LLI_ENTRY_BYTE_SIZE * 
 728                                                        req_ctx->in_nents);
 729                        HW_DESC_SET_DOUT_MLLI(&desc[*seq_size], 
 730                                (ctx_p->drvdata->mlli_sram_addr +
 731                                LLI_ENTRY_BYTE_SIZE * 
 732                                                req_ctx->in_mlli_nents), 
 733                                req_ctx->out_mlli_nents, NS_BIT,(areq == NULL)? 0:1);
 734                }
 735                if (areq != NULL) {
 736                        HW_DESC_SET_QUEUE_LAST_IND(&desc[*seq_size]);
 737                }
 738                HW_DESC_SET_FLOW_MODE(&desc[*seq_size], flow_mode);
 739                (*seq_size)++;
 740        }
 741}
 742
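     /*
      * Post-processing common to sync and async requests: unmap the DMA
      * buffers, decrement the inflight counter for BYPASS flows and, for
      * async requests, complete the ablkcipher request.
      */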
 743static int ssi_blkcipher_complete(struct device *dev,
 744                                  struct ssi_ablkcipher_ctx *ctx_p, 
 745                                  struct blkcipher_req_ctx *req_ctx,
 746                                  struct scatterlist *dst, struct scatterlist *src,
 747                                  void *info, //req info
 748                                  unsigned int ivsize,
 749                                  void *areq,
 750                                  void __iomem *cc_base)
 751{
 752        int completion_error = 0;
 753        uint32_t inflight_counter;
 754        DECL_CYCLE_COUNT_RESOURCES;
 755
 756        START_CYCLE_COUNT();
 757        ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
 758        info = req_ctx->backup_info;
 759        END_CYCLE_COUNT(STAT_OP_TYPE_GENERIC, STAT_PHASE_4);
 760
 761
  762        /* Set the inflight counter value to a local variable */
  763        inflight_counter = ctx_p->drvdata->inflight_counter;
  764        /* Decrease the inflight counter */
  765        if (ctx_p->flow_mode == BYPASS && ctx_p->drvdata->inflight_counter > 0)
  766                ctx_p->drvdata->inflight_counter--;
  767
  768        if (areq) {
 769                ablkcipher_request_complete(areq, completion_error);
 770                return 0;
 771        }
 772        return completion_error;
 773}
 774
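     /*
      * Common encrypt/decrypt path: validate the request size, map the
      * source/destination buffers, build the setup and data descriptor
      * sequence and hand it to the request manager. A NULL @areq means the
      * request is processed synchronously.
      */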
 775static int ssi_blkcipher_process(
 776        struct crypto_tfm *tfm,
 777        struct blkcipher_req_ctx *req_ctx,
 778        struct scatterlist *dst, struct scatterlist *src,
 779        unsigned int nbytes,
 780        void *info, //req info
 781        unsigned int ivsize,
 782        void *areq, 
 783        enum drv_crypto_direction direction)
 784{
 785        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 786        struct device *dev = &ctx_p->drvdata->plat_dev->dev;
 787        HwDesc_s desc[MAX_ABLKCIPHER_SEQ_LEN];
 788        struct ssi_crypto_req ssi_req = {};
  789        int rc, seq_len = 0, cts_restore_flag = 0;
 790        DECL_CYCLE_COUNT_RESOURCES;
 791
 792        SSI_LOG_DEBUG("%s areq=%p info=%p nbytes=%d\n",
 793                ((direction==DRV_CRYPTO_DIRECTION_ENCRYPT)?"Encrypt":"Decrypt"),
 794                     areq, info, nbytes);
 795
 796        CHECK_AND_RETURN_UPON_FIPS_ERROR();
 797        /* STAT_PHASE_0: Init and sanity checks */
 798        START_CYCLE_COUNT();
 799        
 800        /* TODO: check data length according to mode */
 801        if (unlikely(validate_data_size(ctx_p, nbytes))) {
 802                SSI_LOG_ERR("Unsupported data size %d.\n", nbytes);
 803                crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
 804                return -EINVAL;
 805        }
 806        if (nbytes == 0) {
 807                /* No data to process is valid */
 808                return 0;
 809        }
  810        /* For CTS, when the data size is block-aligned, use plain CBC mode */
  811        if (((nbytes % AES_BLOCK_SIZE) == 0) && (ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS)) {
 812
 813                ctx_p->cipher_mode = DRV_CIPHER_CBC;
 814                cts_restore_flag = 1;
 815        }
 816
 817        /* Setup DX request structure */
 818        ssi_req.user_cb = (void *)ssi_ablkcipher_complete;
 819        ssi_req.user_arg = (void *)areq;
 820
 821#ifdef ENABLE_CYCLE_COUNT
 822        ssi_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
 823                STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;
 824
 825#endif
 826
 827        /* Setup request context */
 828        req_ctx->gen_ctx.op_type = direction;
 829        
 830        END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_0);
 831
 832        /* STAT_PHASE_1: Map buffers */
 833        START_CYCLE_COUNT();
 834        
 835        rc = ssi_buffer_mgr_map_blkcipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, info, src, dst);
 836        if (unlikely(rc != 0)) {
 837                SSI_LOG_ERR("map_request() failed\n");
 838                goto exit_process;
 839        }
 840
 841        END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_1);
 842
 843        /* STAT_PHASE_2: Create sequence */
 844        START_CYCLE_COUNT();
 845
 846        /* Setup processing */
 847#if SSI_CC_HAS_MULTI2
 848        if (ctx_p->flow_mode == S_DIN_to_MULTI2) {
 849                ssi_blkcipher_create_multi2_setup_desc(tfm,
 850                                                       req_ctx,
 851                                                       ivsize,
 852                                                       desc,
 853                                                       &seq_len);
 854        } else
 855#endif /*SSI_CC_HAS_MULTI2*/
 856        {
 857                ssi_blkcipher_create_setup_desc(tfm,
 858                                                req_ctx,
 859                                                ivsize,
 860                                                nbytes,
 861                                                desc,
 862                                                &seq_len);
 863        }
 864        /* Data processing */
 865        ssi_blkcipher_create_data_desc(tfm,
 866                              req_ctx, 
 867                              dst, src,
 868                              nbytes,
 869                              areq,
 870                              desc, &seq_len);
 871
 872        /* do we need to generate IV? */
 873        if (req_ctx->is_giv == true) {
 874                ssi_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
 875                ssi_req.ivgen_dma_addr_len = 1;
 876                /* set the IV size (8/16 B long)*/
 877                ssi_req.ivgen_size = ivsize;
 878        }
 879        END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_2);
 880
 881        /* STAT_PHASE_3: Lock HW and push sequence */
 882        START_CYCLE_COUNT();
 883        
 884        rc = send_request(ctx_p->drvdata, &ssi_req, desc, seq_len, (areq == NULL)? 0:1);
 885        if(areq != NULL) {
 886                if (unlikely(rc != -EINPROGRESS)) {
 887                        /* Failed to send the request or request completed synchronously */
 888                        ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
 889                }
 890
 891                END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
 892        } else {
 893                if (rc != 0) {
 894                        ssi_buffer_mgr_unmap_blkcipher_request(dev, req_ctx, ivsize, src, dst);
 895                        END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);            
 896                } else {
 897                        END_CYCLE_COUNT(ssi_req.op_type, STAT_PHASE_3);
 898                        rc = ssi_blkcipher_complete(dev, ctx_p, req_ctx, dst, src, info, ivsize, NULL, ctx_p->drvdata->cc_base);
 899                } 
 900        }
 901
 902exit_process:
 903        if (cts_restore_flag != 0)
 904                ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;
 905        
 906        return rc;
 907}
 908
 909static void ssi_ablkcipher_complete(struct device *dev, void *ssi_req, void __iomem *cc_base)
 910{
 911        struct ablkcipher_request *areq = (struct ablkcipher_request *)ssi_req;
 912        struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(areq);
 913        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(areq);
 914        struct ssi_ablkcipher_ctx *ctx_p = crypto_ablkcipher_ctx(tfm);
 915        unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);
 916
 917        CHECK_AND_RETURN_VOID_UPON_FIPS_ERROR();
 918
 919        ssi_blkcipher_complete(dev, ctx_p, req_ctx, areq->dst, areq->src, areq->info, ivsize, areq, cc_base);
 920}
 921
 922
 923
 924static int ssi_sblkcipher_init(struct crypto_tfm *tfm)
 925{
 926        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 927
 928        /* Allocate sync ctx buffer */
 929        ctx_p->sync_ctx = kmalloc(sizeof(struct blkcipher_req_ctx), GFP_KERNEL|GFP_DMA);
 930        if (!ctx_p->sync_ctx) {
 931                SSI_LOG_ERR("Allocating sync ctx buffer in context failed\n");
 932                return -ENOMEM;
 933        }
 934        SSI_LOG_DEBUG("Allocated sync ctx buffer in context ctx_p->sync_ctx=@%p\n",
 935                                                                ctx_p->sync_ctx);
 936
 937        return ssi_blkcipher_init(tfm);
 938}
 939
 940
 941static void ssi_sblkcipher_exit(struct crypto_tfm *tfm)
 942{
 943        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 944        
 945        kfree(ctx_p->sync_ctx);
 946        SSI_LOG_DEBUG("Free sync ctx buffer in context ctx_p->sync_ctx=@%p\n", ctx_p->sync_ctx);
 947
 948        ssi_blkcipher_exit(tfm);
 949}
 950
 951#ifdef SYNC_ALGS
 952static int ssi_sblkcipher_encrypt(struct blkcipher_desc *desc,
 953                        struct scatterlist *dst, struct scatterlist *src,
 954                        unsigned int nbytes)
 955{
 956        struct crypto_blkcipher *blk_tfm = desc->tfm;
 957        struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
 958        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 959        struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
 960        unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
 961
 962        req_ctx->backup_info = desc->info;
 963        req_ctx->is_giv = false;
 964
 965        return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_ENCRYPT);
 966}
 967
 968static int ssi_sblkcipher_decrypt(struct blkcipher_desc *desc,
 969                        struct scatterlist *dst, struct scatterlist *src,
 970                        unsigned int nbytes)
 971{
 972        struct crypto_blkcipher *blk_tfm = desc->tfm;
 973        struct crypto_tfm *tfm = crypto_blkcipher_tfm(blk_tfm);
 974        struct ssi_ablkcipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
 975        struct blkcipher_req_ctx *req_ctx = ctx_p->sync_ctx;
 976        unsigned int ivsize = crypto_blkcipher_ivsize(blk_tfm);
 977
 978        req_ctx->backup_info = desc->info;
 979        req_ctx->is_giv = false;
 980
 981        return ssi_blkcipher_process(tfm, req_ctx, dst, src, nbytes, desc->info, ivsize, NULL, DRV_CRYPTO_DIRECTION_DECRYPT);
 982}
 983#endif
 984
 985/* Async wrap functions */
 986
 987static int ssi_ablkcipher_init(struct crypto_tfm *tfm)
 988{
 989        struct ablkcipher_tfm *ablktfm = &tfm->crt_ablkcipher;
 990        
 991        ablktfm->reqsize = sizeof(struct blkcipher_req_ctx);
 992
 993        return ssi_blkcipher_init(tfm);
 994}
 995
 996
 997static int ssi_ablkcipher_setkey(struct crypto_ablkcipher *tfm, 
 998                                const u8 *key, 
 999                                unsigned int keylen)
1000{
1001        return ssi_blkcipher_setkey(crypto_ablkcipher_tfm(tfm), key, keylen);
1002}
1003
1004static int ssi_ablkcipher_encrypt(struct ablkcipher_request *req)
1005{
1006        struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1007        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1008        struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1009        unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
1010
1011        req_ctx->backup_info = req->info;
1012        req_ctx->is_giv = false;
1013
1014        return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_ENCRYPT);
1015}
1016
1017static int ssi_ablkcipher_decrypt(struct ablkcipher_request *req)
1018{
1019        struct crypto_ablkcipher *ablk_tfm = crypto_ablkcipher_reqtfm(req);
1020        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablk_tfm);
1021        struct blkcipher_req_ctx *req_ctx = ablkcipher_request_ctx(req);
1022        unsigned int ivsize = crypto_ablkcipher_ivsize(ablk_tfm);
1023
1024        req_ctx->backup_info = req->info;
1025        req_ctx->is_giv = false;
1026        return ssi_blkcipher_process(tfm, req_ctx, req->dst, req->src, req->nbytes, req->info, ivsize, (void *)req, DRV_CRYPTO_DIRECTION_DECRYPT);
1027}
1028
1029
1030/* DX Block cipher alg */
1031static struct ssi_alg_template blkcipher_algs[] = {
1032/* Async template */
1033#if SSI_CC_HAS_AES_XTS
1034        {
1035                .name = "xts(aes)",
1036                .driver_name = "xts-aes-dx",
1037                .blocksize = AES_BLOCK_SIZE,
1038                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1039                .template_ablkcipher = {
1040                        .setkey = ssi_ablkcipher_setkey,
1041                        .encrypt = ssi_ablkcipher_encrypt,
1042                        .decrypt = ssi_ablkcipher_decrypt,
1043                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1044                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1045                        .ivsize = AES_BLOCK_SIZE,
1046                        .geniv = "eseqiv",
1047                        },
1048                .cipher_mode = DRV_CIPHER_XTS,
1049                .flow_mode = S_DIN_to_AES,
1050                .synchronous = false,
1051        },
1052        {
1053                .name = "xts(aes)",
1054                .driver_name = "xts-aes-du512-dx",
1055                .blocksize = AES_BLOCK_SIZE,
1056                .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1057                .template_ablkcipher = {
1058                        .setkey = ssi_ablkcipher_setkey,
1059                        .encrypt = ssi_ablkcipher_encrypt,
1060                        .decrypt = ssi_ablkcipher_decrypt,
1061                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1062                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1063                        .ivsize = AES_BLOCK_SIZE,
1064                        },
1065                .cipher_mode = DRV_CIPHER_XTS,
1066                .flow_mode = S_DIN_to_AES,
1067                .synchronous = false,
1068        },
1069        {
1070                .name = "xts(aes)",
1071                .driver_name = "xts-aes-du4096-dx",
1072                .blocksize = AES_BLOCK_SIZE,
1073                .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1074                .template_ablkcipher = {
1075                        .setkey = ssi_ablkcipher_setkey,
1076                        .encrypt = ssi_ablkcipher_encrypt,
1077                        .decrypt = ssi_ablkcipher_decrypt,
1078                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1079                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1080                        .ivsize = AES_BLOCK_SIZE,
1081                        },
1082                .cipher_mode = DRV_CIPHER_XTS,
1083                .flow_mode = S_DIN_to_AES,
1084                .synchronous = false,
1085        },
1086#endif /*SSI_CC_HAS_AES_XTS*/
1087#if SSI_CC_HAS_AES_ESSIV
1088        {
1089                .name = "essiv(aes)",
1090                .driver_name = "essiv-aes-dx",
1091                .blocksize = AES_BLOCK_SIZE,
1092                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1093                .template_ablkcipher = {
1094                        .setkey = ssi_ablkcipher_setkey,
1095                        .encrypt = ssi_ablkcipher_encrypt,
1096                        .decrypt = ssi_ablkcipher_decrypt,
1097                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1098                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1099                        .ivsize = AES_BLOCK_SIZE,
1100                        },
1101                .cipher_mode = DRV_CIPHER_ESSIV,
1102                .flow_mode = S_DIN_to_AES,
1103                .synchronous = false,
1104        },
1105        {
1106                .name = "essiv(aes)",
1107                .driver_name = "essiv-aes-du512-dx",
1108                .blocksize = AES_BLOCK_SIZE,
1109                .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1110                .template_ablkcipher = {
1111                        .setkey = ssi_ablkcipher_setkey,
1112                        .encrypt = ssi_ablkcipher_encrypt,
1113                        .decrypt = ssi_ablkcipher_decrypt,
1114                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1115                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1116                        .ivsize = AES_BLOCK_SIZE,
1117                        },
1118                .cipher_mode = DRV_CIPHER_ESSIV,
1119                .flow_mode = S_DIN_to_AES,
1120                .synchronous = false,
1121        },
1122        {
1123                .name = "essiv(aes)",
1124                .driver_name = "essiv-aes-du4096-dx",
1125                .blocksize = AES_BLOCK_SIZE,
1126                .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1127                .template_ablkcipher = {
1128                        .setkey = ssi_ablkcipher_setkey,
1129                        .encrypt = ssi_ablkcipher_encrypt,
1130                        .decrypt = ssi_ablkcipher_decrypt,
1131                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1132                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1133                        .ivsize = AES_BLOCK_SIZE,
1134                        },
1135                .cipher_mode = DRV_CIPHER_ESSIV,
1136                .flow_mode = S_DIN_to_AES,
1137                .synchronous = false,
1138        },
1139#endif /*SSI_CC_HAS_AES_ESSIV*/
1140#if SSI_CC_HAS_AES_BITLOCKER
1141        {
1142                .name = "bitlocker(aes)",
1143                .driver_name = "bitlocker-aes-dx",
1144                .blocksize = AES_BLOCK_SIZE,
1145                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1146                .template_ablkcipher = {
1147                        .setkey = ssi_ablkcipher_setkey,
1148                        .encrypt = ssi_ablkcipher_encrypt,
1149                        .decrypt = ssi_ablkcipher_decrypt,
1150                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1151                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1152                        .ivsize = AES_BLOCK_SIZE,
1153                        },
1154                .cipher_mode = DRV_CIPHER_BITLOCKER,
1155                .flow_mode = S_DIN_to_AES,
1156                .synchronous = false,
1157        },
1158        {
1159                .name = "bitlocker(aes)",
1160                .driver_name = "bitlocker-aes-du512-dx",
1161                .blocksize = AES_BLOCK_SIZE,
1162                .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_512,
1163                .template_ablkcipher = {
1164                        .setkey = ssi_ablkcipher_setkey,
1165                        .encrypt = ssi_ablkcipher_encrypt,
1166                        .decrypt = ssi_ablkcipher_decrypt,
1167                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1168                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1169                        .ivsize = AES_BLOCK_SIZE,
1170                        },
1171                .cipher_mode = DRV_CIPHER_BITLOCKER,
1172                .flow_mode = S_DIN_to_AES,
1173                .synchronous = false,
1174        },
1175        {
1176                .name = "bitlocker(aes)",
1177                .driver_name = "bitlocker-aes-du4096-dx",
1178                .blocksize = AES_BLOCK_SIZE,
1179                .type = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_BULK_DU_4096,
1180                .template_ablkcipher = {
1181                        .setkey = ssi_ablkcipher_setkey,
1182                        .encrypt = ssi_ablkcipher_encrypt,
1183                        .decrypt = ssi_ablkcipher_decrypt,
1184                        .min_keysize = AES_MIN_KEY_SIZE * 2,
1185                        .max_keysize = AES_MAX_KEY_SIZE * 2,
1186                        .ivsize = AES_BLOCK_SIZE,
1187                        },
1188                .cipher_mode = DRV_CIPHER_BITLOCKER,
1189                .flow_mode = S_DIN_to_AES,
1190                .synchronous = false,
1191        },
1192#endif /*SSI_CC_HAS_AES_BITLOCKER*/
1193        {
1194                .name = "ecb(aes)",
1195                .driver_name = "ecb-aes-dx",
1196                .blocksize = AES_BLOCK_SIZE,
1197                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1198                .template_ablkcipher = {
1199                        .setkey = ssi_ablkcipher_setkey,
1200                        .encrypt = ssi_ablkcipher_encrypt,
1201                        .decrypt = ssi_ablkcipher_decrypt,
1202                        .min_keysize = AES_MIN_KEY_SIZE,
1203                        .max_keysize = AES_MAX_KEY_SIZE,
1204                        .ivsize = 0,
1205                        },
1206                .cipher_mode = DRV_CIPHER_ECB,
1207                .flow_mode = S_DIN_to_AES,
1208                .synchronous = false,
1209        },
1210        {
1211                .name = "cbc(aes)",
1212                .driver_name = "cbc-aes-dx",
1213                .blocksize = AES_BLOCK_SIZE,
1214                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1215                .template_ablkcipher = {
1216                        .setkey = ssi_ablkcipher_setkey,
1217                        .encrypt = ssi_ablkcipher_encrypt,
1218                        .decrypt = ssi_ablkcipher_decrypt,
1219                        .min_keysize = AES_MIN_KEY_SIZE,
1220                        .max_keysize = AES_MAX_KEY_SIZE,
1221                        .ivsize = AES_BLOCK_SIZE,
1222                        },
1223                .cipher_mode = DRV_CIPHER_CBC,
1224                .flow_mode = S_DIN_to_AES,
1225                .synchronous = false,
1226        },
1227        {
1228                .name = "ofb(aes)",
1229                .driver_name = "ofb-aes-dx",
1230                .blocksize = AES_BLOCK_SIZE,
1231                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1232                .template_ablkcipher = {
1233                        .setkey = ssi_ablkcipher_setkey,
1234                        .encrypt = ssi_ablkcipher_encrypt,
1235                        .decrypt = ssi_ablkcipher_decrypt,
1236                        .min_keysize = AES_MIN_KEY_SIZE,
1237                        .max_keysize = AES_MAX_KEY_SIZE,
1238                        .ivsize = AES_BLOCK_SIZE,
1239                        },
1240                .cipher_mode = DRV_CIPHER_OFB,
1241                .flow_mode = S_DIN_to_AES,
1242                .synchronous = false,
1243        },
1244#if SSI_CC_HAS_AES_CTS
1245        {
1246                .name = "cts1(cbc(aes))",
1247                .driver_name = "cts1-cbc-aes-dx",
1248                .blocksize = AES_BLOCK_SIZE,
1249                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1250                .template_ablkcipher = {
1251                        .setkey = ssi_ablkcipher_setkey,
1252                        .encrypt = ssi_ablkcipher_encrypt,
1253                        .decrypt = ssi_ablkcipher_decrypt,
1254                        .min_keysize = AES_MIN_KEY_SIZE,
1255                        .max_keysize = AES_MAX_KEY_SIZE,
1256                        .ivsize = AES_BLOCK_SIZE,
1257                        },
1258                .cipher_mode = DRV_CIPHER_CBC_CTS,
1259                .flow_mode = S_DIN_to_AES,
1260                .synchronous = false,
1261        },
1262#endif /* SSI_CC_HAS_AES_CTS */
1263        {
1264                .name = "ctr(aes)",
1265                .driver_name = "ctr-aes-dx",
1266                .blocksize = 1, /* CTR operates as a stream cipher */
1267                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1268                .template_ablkcipher = {
1269                        .setkey = ssi_ablkcipher_setkey,
1270                        .encrypt = ssi_ablkcipher_encrypt,
1271                        .decrypt = ssi_ablkcipher_decrypt,
1272                        .min_keysize = AES_MIN_KEY_SIZE,
1273                        .max_keysize = AES_MAX_KEY_SIZE,
1274                        .ivsize = AES_BLOCK_SIZE,
1275                        },
1276                .cipher_mode = DRV_CIPHER_CTR,
1277                .flow_mode = S_DIN_to_AES,
1278                .synchronous = false,
1279        },
1280        {
1281                .name = "cbc(des3_ede)",
1282                .driver_name = "cbc-3des-dx",
1283                .blocksize = DES3_EDE_BLOCK_SIZE,
1284                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1285                .template_ablkcipher = {
1286                        .setkey = ssi_ablkcipher_setkey,
1287                        .encrypt = ssi_ablkcipher_encrypt,
1288                        .decrypt = ssi_ablkcipher_decrypt,
1289                        .min_keysize = DES3_EDE_KEY_SIZE,
1290                        .max_keysize = DES3_EDE_KEY_SIZE,
1291                        .ivsize = DES3_EDE_BLOCK_SIZE,
1292                        },
1293                .cipher_mode = DRV_CIPHER_CBC,
1294                .flow_mode = S_DIN_to_DES,
1295                .synchronous = false,
1296        },
1297        {
1298                .name = "ecb(des3_ede)",
1299                .driver_name = "ecb-3des-dx",
1300                .blocksize = DES3_EDE_BLOCK_SIZE,
1301                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1302                .template_ablkcipher = {
1303                        .setkey = ssi_ablkcipher_setkey,
1304                        .encrypt = ssi_ablkcipher_encrypt,
1305                        .decrypt = ssi_ablkcipher_decrypt,
1306                        .min_keysize = DES3_EDE_KEY_SIZE,
1307                        .max_keysize = DES3_EDE_KEY_SIZE,
1308                        .ivsize = 0,
1309                        },
1310                .cipher_mode = DRV_CIPHER_ECB,
1311                .flow_mode = S_DIN_to_DES,
1312                .synchronous = false,
1313        },
1314        {
1315                .name = "cbc(des)",
1316                .driver_name = "cbc-des-dx",
1317                .blocksize = DES_BLOCK_SIZE,
1318                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1319                .template_ablkcipher = {
1320                        .setkey = ssi_ablkcipher_setkey,
1321                        .encrypt = ssi_ablkcipher_encrypt,
1322                        .decrypt = ssi_ablkcipher_decrypt,
1323                        .min_keysize = DES_KEY_SIZE,
1324                        .max_keysize = DES_KEY_SIZE,
1325                        .ivsize = DES_BLOCK_SIZE,
1326                        },
1327                .cipher_mode = DRV_CIPHER_CBC,
1328                .flow_mode = S_DIN_to_DES,
1329                .synchronous = false,
1330        },
1331        {
1332                .name = "ecb(des)",
1333                .driver_name = "ecb-des-dx",
1334                .blocksize = DES_BLOCK_SIZE,
1335                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1336                .template_ablkcipher = {
1337                        .setkey = ssi_ablkcipher_setkey,
1338                        .encrypt = ssi_ablkcipher_encrypt,
1339                        .decrypt = ssi_ablkcipher_decrypt,
1340                        .min_keysize = DES_KEY_SIZE,
1341                        .max_keysize = DES_KEY_SIZE,
1342                        .ivsize = 0,
1343                        },
1344                .cipher_mode = DRV_CIPHER_ECB,
1345                .flow_mode = S_DIN_to_DES,
1346                .synchronous = false,
1347        },
1348#if SSI_CC_HAS_MULTI2
1349        {
1350                .name = "cbc(multi2)",
1351                .driver_name = "cbc-multi2-dx",
1352                .blocksize = CC_MULTI2_BLOCK_SIZE,
1353                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1354                .template_ablkcipher = {
1355                        .setkey = ssi_ablkcipher_setkey,
1356                        .encrypt = ssi_ablkcipher_encrypt,
1357                        .decrypt = ssi_ablkcipher_decrypt,
1358                        .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1359                        .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1360                        .ivsize = CC_MULTI2_IV_SIZE,
1361                        },
1362                .cipher_mode = DRV_MULTI2_CBC,
1363                .flow_mode = S_DIN_to_MULTI2,
1364                .synchronous = false,
1365        },
1366        {
1367                .name = "ofb(multi2)",
1368                .driver_name = "ofb-multi2-dx",
1369                .blocksize = 1,
1370                .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1371                .template_ablkcipher = {
1372                        .setkey = ssi_ablkcipher_setkey,
1373                        .encrypt = ssi_ablkcipher_encrypt,
1374                        .decrypt = ssi_ablkcipher_encrypt, /* OFB: decrypt == encrypt */
1375                        .min_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1376                        .max_keysize = CC_MULTI2_SYSTEM_N_DATA_KEY_SIZE + 1,
1377                        .ivsize = CC_MULTI2_IV_SIZE,
1378                        },
1379                .cipher_mode = DRV_MULTI2_OFB,
1380                .flow_mode = S_DIN_to_MULTI2,
1381                .synchronous = false,
1382        },
1383#endif /* SSI_CC_HAS_MULTI2 */
1384};
1385
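    /*
     * ssi_ablkcipher_create_alg() - allocate a ssi_crypto_alg and populate its
     * embedded crypto_alg from the given template: name, driver name, block
     * size, priority and context size. Synchronous templates are bound to the
     * blkcipher interface, asynchronous ones to the ablkcipher interface with
     * CRYPTO_ALG_ASYNC set.
     *
     * Returns the new descriptor, or ERR_PTR(-ENOMEM) if allocation fails.
     */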
1386static struct ssi_crypto_alg *ssi_ablkcipher_create_alg(
1387                                struct ssi_alg_template *template)
1388{
1389        struct ssi_crypto_alg *t_alg;
1390        struct crypto_alg *alg;
1391
1392        t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
1393        if (!t_alg) {
1394                SSI_LOG_ERR("failed to allocate t_alg\n");
1395                return ERR_PTR(-ENOMEM);
1396        }
1397
1398        alg = &t_alg->crypto_alg;
1399
1400        snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
1401        snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1402                 template->driver_name);
1403        alg->cra_module = THIS_MODULE;
1404        alg->cra_priority = SSI_CRA_PRIO;
1405        alg->cra_blocksize = template->blocksize;
1406        alg->cra_alignmask = 0;
1407        alg->cra_ctxsize = sizeof(struct ssi_ablkcipher_ctx);
1408
1409        alg->cra_init = template->synchronous ? ssi_sblkcipher_init : ssi_ablkcipher_init;
1410        alg->cra_exit = template->synchronous ? ssi_sblkcipher_exit : ssi_blkcipher_exit;
1411        alg->cra_type = template->synchronous ? &crypto_blkcipher_type : &crypto_ablkcipher_type;
1412        if (template->synchronous) {
1413                alg->cra_blkcipher = template->template_sblkcipher;
1414                alg->cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
1415                                template->type;
1416        } else {
1417                alg->cra_ablkcipher = template->template_ablkcipher;
1418                alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
1419                                template->type;
1420        }
1421
1422        t_alg->cipher_mode = template->cipher_mode;
1423        t_alg->flow_mode = template->flow_mode;
1424
1425        return t_alg;
1426}
1427
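    /*
     * ssi_ablkcipher_free() - unregister every algorithm on the driver's
     * blkcipher list, free the per-algorithm descriptors and release the
     * blkcipher handle. A missing handle is treated as a no-op.
     */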
1428int ssi_ablkcipher_free(struct ssi_drvdata *drvdata)
1429{
1430        struct ssi_crypto_alg *t_alg, *n;
1431        struct ssi_blkcipher_handle *blkcipher_handle =
1432                                                drvdata->blkcipher_handle;
1435
1436        if (blkcipher_handle) {
1437                /* Remove registered algs */
1438                list_for_each_entry_safe(t_alg, n,
1439                                         &blkcipher_handle->blkcipher_alg_list,
1440                                         entry) {
1441                        crypto_unregister_alg(&t_alg->crypto_alg);
1442                        list_del(&t_alg->entry);
1443                        kfree(t_alg);
1444                }
1445                kfree(blkcipher_handle);
1446                drvdata->blkcipher_handle = NULL;
1447        }
1448        return 0;
1449}
1450
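    /*
     * ssi_ablkcipher_alloc() - register all entries of blkcipher_algs[] with
     * the Linux crypto API and keep the registered algorithms on the driver's
     * blkcipher handle. On any failure, algorithms registered so far are
     * removed again through ssi_ablkcipher_free().
     */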
1453int ssi_ablkcipher_alloc(struct ssi_drvdata *drvdata)
1454{
1455        struct ssi_blkcipher_handle *ablkcipher_handle;
1456        struct ssi_crypto_alg *t_alg;
1457        int rc = -ENOMEM;
1458        int alg;
1459
1460        ablkcipher_handle = kmalloc(sizeof(*ablkcipher_handle), GFP_KERNEL);
1461
1462        if (!ablkcipher_handle)
1463                return -ENOMEM;
1464
1465        drvdata->blkcipher_handle = ablkcipher_handle;
1466
1467        INIT_LIST_HEAD(&ablkcipher_handle->blkcipher_alg_list);
1468
1469        /* Linux crypto */
1470        SSI_LOG_DEBUG("Number of algorithms = %zu\n", ARRAY_SIZE(blkcipher_algs));
1471        for (alg = 0; alg < ARRAY_SIZE(blkcipher_algs); alg++) {
1472                SSI_LOG_DEBUG("creating %s\n", blkcipher_algs[alg].driver_name);
1473                t_alg = ssi_ablkcipher_create_alg(&blkcipher_algs[alg]);
1474                if (IS_ERR(t_alg)) {
1475                        rc = PTR_ERR(t_alg);
1476                        SSI_LOG_ERR("%s alg allocation failed\n",
1477                                 blkcipher_algs[alg].driver_name);
1478                        goto fail0;
1479                }
1480                t_alg->drvdata = drvdata;
1481
1482                SSI_LOG_DEBUG("registering %s\n", blkcipher_algs[alg].driver_name);
1483                rc = crypto_register_alg(&t_alg->crypto_alg);
1484                SSI_LOG_DEBUG("%s alg registration rc = %d\n",
1485                              t_alg->crypto_alg.cra_driver_name, rc);
1486                if (unlikely(rc != 0)) {
1487                        SSI_LOG_ERR("%s alg registration failed\n",
1488                                    t_alg->crypto_alg.cra_driver_name);
1489                        kfree(t_alg);
1490                        goto fail0;
1491                }
1492
1493                list_add_tail(&t_alg->entry,
1494                              &ablkcipher_handle->blkcipher_alg_list);
1495                SSI_LOG_DEBUG("Registered %s\n",
1496                              t_alg->crypto_alg.cra_driver_name);
1497        }
1498        return 0;
1499
1500fail0:
1501        ssi_ablkcipher_free(drvdata);
1502        return rc;
1503}
1504