linux/drivers/crypto/ux500/cryp/cryp_core.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irqreturn.h>
#include <linux/klist.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/semaphore.h>
#include <linux/platform_data/dma-ste-dma40.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/scatterwalk.h>

#include <linux/platform_data/crypto-ux500.h>

#include "cryp_p.h"
#include "cryp.h"

#define CRYP_MAX_KEY_SIZE       32
#define BYTES_PER_WORD          4

static int cryp_mode;
static atomic_t session_id;

static struct stedma40_chan_cfg *mem_to_engine;
static struct stedma40_chan_cfg *engine_to_mem;

/**
 * struct cryp_driver_data - data specific to the driver.
 *
 * @device_list: A list of registered devices to choose from.
 * @device_allocation: A semaphore initialized with number of devices.
 */
struct cryp_driver_data {
        struct klist device_list;
        struct semaphore device_allocation;
};

/**
 * struct cryp_ctx - Crypto context
 * @config: Crypto mode.
 * @key: Key buffer of up to CRYP_MAX_KEY_SIZE bytes.
 * @keylen: Length of key.
 * @iv: Pointer to initialization vector.
 * @indata: Pointer to indata.
 * @outdata: Pointer to outdata.
 * @datalen: Length of indata.
 * @outlen: Length of outdata.
 * @blocksize: Size of blocks.
 * @updated: Updated flag.
 * @dev_ctx: Device dependent context.
 * @device: Pointer to the device.
 * @session_id: Session id taken from the global session counter, used to
 *              detect that the hardware has since been claimed by another
 *              context.
 */
struct cryp_ctx {
        struct cryp_config config;
        u8 key[CRYP_MAX_KEY_SIZE];
        u32 keylen;
        u8 *iv;
        const u8 *indata;
        u8 *outdata;
        u32 datalen;
        u32 outlen;
        u32 blocksize;
        u8 updated;
        struct cryp_device_context dev_ctx;
        struct cryp_device_data *device;
        u32 session_id;
};

static struct cryp_driver_data driver_data;

/**
 * uint8p_to_uint32_be - convert four bytes to a big-endian u32
 * @in: Data to convert.
 */
static inline u32 uint8p_to_uint32_be(u8 *in)
{
        u32 *data = (u32 *)in;

        return cpu_to_be32p(data);
}
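
/*
 * Informal example (for illustration only, not used by the driver): for
 * in[] = { 0x12, 0x34, 0x56, 0x78 } the function returns 0x12345678 on
 * both little- and big-endian CPUs, which is the word layout that
 * cfg_ivs() and cfg_keys() feed to the CRYP registers.
 */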

/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 *  nibble 2 (n2) includes bits 4-7.
 *
 *  Nibble 1 (n1):
 *  (The "old" (moved) bit is replaced with a zero)
 *  1. Move bits 6 and 7, 4 positions to the right.
 *  2. Move bits 3 and 5, 2 positions to the right.
 *  3. Move bits 1-4, 1 position to the right.
 *
 *  Nibble 2 (n2):
 *  1. Move bits 0 and 1, 4 positions to the left.
 *  2. Move bits 2 and 4, 2 positions to the left.
 *  3. Move bits 3-6, 1 position to the left.
 *
 *  Combine the two nibbles into a complete, swapped byte.
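 *
 *  As an informal example (added for illustration), b = 0xb1
 *  (1011 0001b) is mirrored to 0x8d (1000 1101b).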
 */
static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
                                  right shift 2 */
#define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
                                  right shift 1 */
#define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
                                  left shift 2 */
#define L_SHIFT_1_MASK  0x78 /* (After left shift 2) Bits 3-6,
                                  left shift 1 */

        u8 n1;
        u8 n2;

        /* Swap most significant nibble */
        /* Right shift 4, bits 6 and 7 */
        n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
        /* Right shift 2, bits 3 and 5 */
        n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
        /* Right shift 1, bits 1-4 */
        n1 = (n1  & R_SHIFT_1_MASK) >> 1;

        /* Swap least significant nibble */
        /* Left shift 4, bits 0 and 1 */
        n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
        /* Left shift 2, bits 2 and 4 */
        n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
        /* Left shift 1, bits 3-6 */
        n2 = (n2  & L_SHIFT_1_MASK) << 1;

        return n1 | n2;
}

static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
                                                      u8 *out, u32 len)
{
        unsigned int i = 0;
        int j;
        int index = 0;

        j = len - BYTES_PER_WORD;
        while (j >= 0) {
                for (i = 0; i < BYTES_PER_WORD; i++) {
                        index = len - j - BYTES_PER_WORD + i;
                        out[j + i] =
                                swap_bits_in_byte(in[index]);
                }
                j -= BYTES_PER_WORD;
        }
}
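
/*
 * Informal example (for illustration only): with len == 8, out[0..3]
 * receives the bit-mirrored in[4..7] and out[4..7] the bit-mirrored
 * in[0..3], i.e. the 32-bit words change places while every byte has its
 * bits mirrored by swap_bits_in_byte().
 */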

static void add_session_id(struct cryp_ctx *ctx)
{
        /*
         * We never want 0 to be a valid value, since this is the default value
         * for the software context.
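         * On wrap-around, atomic_inc_and_test() returns true exactly when
         * the counter reaches 0, and the extra atomic_inc() below then
         * steps it on to 1, so 0 is skipped.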
         */
        if (unlikely(atomic_inc_and_test(&session_id)))
                atomic_inc(&session_id);

        ctx->session_id = atomic_read(&session_id);
}

static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
        struct cryp_ctx *ctx;
        int count;
        struct cryp_device_data *device_data;

        if (param == NULL) {
                BUG_ON(!param);
                return IRQ_HANDLED;
        }

        /* The device data comes from the device selected in hw_crypt_noxts. */
        device_data = (struct cryp_device_data *)param;

        ctx = device_data->current_ctx;

        if (ctx == NULL) {
                BUG_ON(!ctx);
                return IRQ_HANDLED;
        }

        dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
                cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
                "out" : "in");

        if (cryp_pending_irq_src(device_data,
                                 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
                if (ctx->outlen / ctx->blocksize > 0) {
                        count = ctx->blocksize / 4;

                        readsl(&device_data->base->dout, ctx->outdata, count);
                        ctx->outdata += count;
                        ctx->outlen -= count;

                        if (ctx->outlen == 0) {
                                cryp_disable_irq_src(device_data,
                                                     CRYP_IRQ_SRC_OUTPUT_FIFO);
                        }
                }
        } else if (cryp_pending_irq_src(device_data,
                                        CRYP_IRQ_SRC_INPUT_FIFO)) {
                if (ctx->datalen / ctx->blocksize > 0) {
                        count = ctx->blocksize / 4;

                        writesl(&device_data->base->din, ctx->indata, count);

                        ctx->indata += count;
                        ctx->datalen -= count;

                        if (ctx->datalen == 0)
                                cryp_disable_irq_src(device_data,
                                                   CRYP_IRQ_SRC_INPUT_FIFO);

                        if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
                                CRYP_PUT_BITS(&device_data->base->cr,
                                              CRYP_START_ENABLE,
                                              CRYP_CR_START_POS,
                                              CRYP_CR_START_MASK);

                                cryp_wait_until_done(device_data);
                        }
                }
        }

        return IRQ_HANDLED;
}

static int mode_is_aes(enum cryp_algo_mode mode)
{
        return  CRYP_ALGO_AES_ECB == mode ||
                CRYP_ALGO_AES_CBC == mode ||
                CRYP_ALGO_AES_CTR == mode ||
                CRYP_ALGO_AES_XTS == mode;
}

static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
                  enum cryp_init_vector_index index)
{
        struct cryp_init_vector_value vector_value;

        dev_dbg(device_data->dev, "[%s]", __func__);

        vector_value.init_value_left = left;
        vector_value.init_value_right = right;

        return cryp_configure_init_vector(device_data,
                                          index,
                                          vector_value);
}

static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
        int i;
        int status = 0;
        int num_of_regs = ctx->blocksize / 8;
        u32 iv[AES_BLOCK_SIZE / 4];

        dev_dbg(device_data->dev, "[%s]", __func__);

        /*
         * Since we loop over num_of_regs, guard against an incorrect
         * blocksize; otherwise cfg_iv() could be called with an index
         * beyond the last IV register, which is an error.
         */
        if (num_of_regs > 2) {
                dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
                        __func__, ctx->blocksize);
                return -EINVAL;
        }

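        /*
         * The IV bytes are first packed into big-endian words, then
         * loaded pairwise (left/right) into the IV registers: AES
         * (16-byte blocks) uses registers 0 and 1, DES/3DES (8-byte
         * blocks) only register 0.
         */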
        for (i = 0; i < ctx->blocksize / 4; i++)
                iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);

        for (i = 0; i < num_of_regs; i++) {
                status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
                                (enum cryp_init_vector_index) i);
                if (status != 0)
                        return status;
        }
        return status;
}

static int set_key(struct cryp_device_data *device_data,
                   u32 left_key,
                   u32 right_key,
                   enum cryp_key_reg_index index)
{
        struct cryp_key_value key_value;
        int cryp_error;

        dev_dbg(device_data->dev, "[%s]", __func__);

        key_value.key_value_left = left_key;
        key_value.key_value_right = right_key;

        cryp_error = cryp_configure_key_values(device_data,
                                               index,
                                               key_value);
        if (cryp_error != 0)
                dev_err(device_data->dev, "[%s]: "
                        "cryp_configure_key_values() failed!", __func__);

        return cryp_error;
}

static int cfg_keys(struct cryp_ctx *ctx)
{
        int i;
        int num_of_regs = ctx->keylen / 8;
        u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
        int cryp_error = 0;

        dev_dbg(ctx->device->dev, "[%s]", __func__);

        if (mode_is_aes(ctx->config.algomode)) {
                swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
                                                   (u8 *)swapped_key,
                                                   ctx->keylen);
        } else {
                for (i = 0; i < ctx->keylen / 4; i++)
                        swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
        }

        for (i = 0; i < num_of_regs; i++) {
                cryp_error = set_key(ctx->device,
                                     swapped_key[i * 2],
                                     swapped_key[i * 2 + 1],
                                     (enum cryp_key_reg_index) i);

                if (cryp_error != 0) {
                        dev_err(ctx->device->dev, "[%s]: set_key() failed!",
                                        __func__);
                        return cryp_error;
                }
        }
        return cryp_error;
}

static int cryp_setup_context(struct cryp_ctx *ctx,
                              struct cryp_device_data *device_data)
{
        u32 control_register = CRYP_CR_DEFAULT;

        switch (cryp_mode) {
        case CRYP_MODE_INTERRUPT:
                writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
                break;

        case CRYP_MODE_DMA:
                writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
                break;

        default:
                break;
        }

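        /*
         * Three cases: a fresh context (updated == 0) gets keys and IVs
         * programmed from scratch; an updated context whose session id no
         * longer matches the global counter has lost the hardware to
         * another context and is restored from its saved register
         * snapshot; otherwise the saved control register is simply reused.
         */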
        if (ctx->updated == 0) {
                cryp_flush_inoutfifo(device_data);
                if (cfg_keys(ctx) != 0) {
                        dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
                                __func__);
                        return -EINVAL;
                }

                if (ctx->iv &&
                    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
                    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
                    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
                        if (cfg_ivs(device_data, ctx) != 0)
                                return -EPERM;
                }

                cryp_set_configuration(device_data, &ctx->config,
                                       &control_register);
                add_session_id(ctx);
        } else if (ctx->updated == 1 &&
                   ctx->session_id != atomic_read(&session_id)) {
                cryp_flush_inoutfifo(device_data);
                cryp_restore_device_context(device_data, &ctx->dev_ctx);

                add_session_id(ctx);
                control_register = ctx->dev_ctx.cr;
        } else {
                control_register = ctx->dev_ctx.cr;
        }

        writel(control_register |
               (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
               &device_data->base->cr);

        return 0;
}

static int cryp_get_device_data(struct cryp_ctx *ctx,
                                struct cryp_device_data **device_data)
{
        int ret;
        struct klist_iter device_iterator;
        struct klist_node *device_node;
        struct cryp_device_data *local_device_data = NULL;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        /* Wait until a device is available */
        ret = down_interruptible(&driver_data.device_allocation);
        if (ret)
                return ret;  /* Interrupted */

        /* Select a device */
        klist_iter_init(&driver_data.device_list, &device_iterator);

        device_node = klist_next(&device_iterator);
        while (device_node) {
                local_device_data = container_of(device_node,
                                           struct cryp_device_data, list_node);
                spin_lock(&local_device_data->ctx_lock);
                /* current_ctx allocates a device, NULL = unallocated */
                if (local_device_data->current_ctx) {
                        device_node = klist_next(&device_iterator);
                } else {
                        local_device_data->current_ctx = ctx;
                        ctx->device = local_device_data;
                        spin_unlock(&local_device_data->ctx_lock);
                        break;
                }
                spin_unlock(&local_device_data->ctx_lock);
        }
        klist_iter_exit(&device_iterator);

        if (!device_node) {
                /*
                 * No free device found.
                 * Since we acquired a device with down_interruptible(),
                 * this should not be able to happen.
                 * The number of available devices, tracked by
                 * device_allocation, therefore stays decremented, since we
                 * do not do an up(device_allocation) here.
                 */
                return -EBUSY;
        }

        *device_data = local_device_data;

        return 0;
}
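
/*
 * Note: a successful cryp_get_device_data() must be paired with clearing
 * device_data->current_ctx and an up() on driver_data.device_allocation
 * once the transfer is done; see the release paths in ablk_crypt() and
 * ablk_dma_crypt().
 */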

static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
                                   struct device *dev)
{
        struct dma_slave_config mem2cryp = {
                .direction = DMA_MEM_TO_DEV,
                .dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .dst_maxburst = 4,
        };
        struct dma_slave_config cryp2mem = {
                .direction = DMA_DEV_TO_MEM,
                .src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
                .src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .src_maxburst = 4,
        };

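        /*
         * Note (descriptive only): both channels use 16-bit FIFO accesses
         * in bursts of four, i.e. 8 bytes per burst; the ste_dma40 channel
         * configurations (mem_to_engine/engine_to_mem) come from platform
         * data, set up in probe.
         */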
        dma_cap_zero(device_data->dma.mask);
        dma_cap_set(DMA_SLAVE, device_data->dma.mask);

        device_data->dma.cfg_mem2cryp = mem_to_engine;
        device_data->dma.chan_mem2cryp =
                dma_request_channel(device_data->dma.mask,
                                    stedma40_filter,
                                    device_data->dma.cfg_mem2cryp);

        device_data->dma.cfg_cryp2mem = engine_to_mem;
        device_data->dma.chan_cryp2mem =
                dma_request_channel(device_data->dma.mask,
                                    stedma40_filter,
                                    device_data->dma.cfg_cryp2mem);

        dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
        dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);

        init_completion(&device_data->dma.cryp_dma_complete);
}

static void cryp_dma_out_callback(void *data)
{
        struct cryp_ctx *ctx = (struct cryp_ctx *) data;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        complete(&ctx->device->dma.cryp_dma_complete);
}

static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
                                 struct scatterlist *sg,
                                 int len,
                                 enum dma_data_direction direction)
{
        struct dma_async_tx_descriptor *desc;
        struct dma_chan *channel = NULL;
        dma_cookie_t cookie;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        if (unlikely(!IS_ALIGNED((u32)sg, 4))) {
                dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
                        "aligned! Addr: 0x%08x", __func__, (u32)sg);
                return -EFAULT;
        }

        switch (direction) {
        case DMA_TO_DEVICE:
                channel = ctx->device->dma.chan_mem2cryp;
                ctx->device->dma.sg_src = sg;
                ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
                                                 ctx->device->dma.sg_src,
                                                 ctx->device->dma.nents_src,
                                                 direction);

                if (!ctx->device->dma.sg_src_len) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not map the sg list (TO_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                        "(TO_DEVICE)", __func__);

                desc = dmaengine_prep_slave_sg(channel,
                                ctx->device->dma.sg_src,
                                ctx->device->dma.sg_src_len,
                                DMA_MEM_TO_DEV, DMA_CTRL_ACK);
                break;

        case DMA_FROM_DEVICE:
                channel = ctx->device->dma.chan_cryp2mem;
                ctx->device->dma.sg_dst = sg;
                ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
                                                 ctx->device->dma.sg_dst,
                                                 ctx->device->dma.nents_dst,
                                                 direction);

                if (!ctx->device->dma.sg_dst_len) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not map the sg list (FROM_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                        "(FROM_DEVICE)", __func__);

                desc = dmaengine_prep_slave_sg(channel,
                                ctx->device->dma.sg_dst,
                                ctx->device->dma.sg_dst_len,
                                DMA_DEV_TO_MEM,
                                DMA_CTRL_ACK |
                                DMA_PREP_INTERRUPT);
                if (!desc) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not prepare the DMA descriptor (FROM_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                desc->callback = cryp_dma_out_callback;
                desc->callback_param = ctx;
                break;

        default:
                dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
                        __func__);
                return -EFAULT;
        }

        /* dmaengine_prep_slave_sg() returns NULL on failure. */
        if (!desc) {
                dev_dbg(ctx->device->dev,
                        "[%s]: Could not prepare the DMA descriptor",
                        __func__);
                return -EFAULT;
        }

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie)) {
                dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
                        __func__);
                return cookie;
        }

        dma_async_issue_pending(channel);

        return 0;
}

static void cryp_dma_done(struct cryp_ctx *ctx)
{
        struct dma_chan *chan;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        chan = ctx->device->dma.chan_mem2cryp;
        dmaengine_terminate_all(chan);
        dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
                     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);

        chan = ctx->device->dma.chan_cryp2mem;
        dmaengine_terminate_all(chan);
        dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
                     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
}

static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
                          int len)
{
        int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        if (error) {
                dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
                        "failed", __func__);
                return error;
        }

        return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
        int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);

        if (error) {
                dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
                        "failed", __func__);
                return error;
        }

        return len;
}

static void cryp_polling_mode(struct cryp_ctx *ctx,
                              struct cryp_device_data *device_data)
{
        int len = ctx->blocksize / BYTES_PER_WORD;
        int remaining_length = ctx->datalen;
        u32 *indata = (u32 *)ctx->indata;
        u32 *outdata = (u32 *)ctx->outdata;

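        /*
         * One full block (len 32-bit words) is written to the input FIFO
         * and read back from the output FIFO per iteration; callers only
         * pass lengths that are a multiple of the block size.
         */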
        while (remaining_length > 0) {
                writesl(&device_data->base->din, indata, len);
                indata += len;
                remaining_length -= (len * BYTES_PER_WORD);
                cryp_wait_until_done(device_data);

                readsl(&device_data->base->dout, outdata, len);
                outdata += len;
                cryp_wait_until_done(device_data);
        }
}

static int cryp_disable_power(struct device *dev,
                              struct cryp_device_data *device_data,
                              bool save_device_context)
{
        int ret = 0;

        dev_dbg(dev, "[%s]", __func__);

        spin_lock(&device_data->power_state_spinlock);
        if (!device_data->power_state)
                goto out;

        spin_lock(&device_data->ctx_lock);
        if (save_device_context && device_data->current_ctx) {
                cryp_save_device_context(device_data,
                                &device_data->current_ctx->dev_ctx,
                                cryp_mode);
                device_data->restore_dev_ctx = true;
        }
        spin_unlock(&device_data->ctx_lock);

        clk_disable(device_data->clk);
        ret = regulator_disable(device_data->pwr_regulator);
        if (ret)
                dev_err(dev, "[%s]: regulator_disable() failed!",
                        __func__);

        device_data->power_state = false;

out:
        spin_unlock(&device_data->power_state_spinlock);

        return ret;
}

static int cryp_enable_power(
                struct device *dev,
                struct cryp_device_data *device_data,
                bool restore_device_context)
{
        int ret = 0;

        dev_dbg(dev, "[%s]", __func__);

        spin_lock(&device_data->power_state_spinlock);
        if (!device_data->power_state) {
                ret = regulator_enable(device_data->pwr_regulator);
                if (ret) {
                        dev_err(dev, "[%s]: regulator_enable() failed!",
                                        __func__);
                        goto out;
                }

                ret = clk_enable(device_data->clk);
                if (ret) {
                        dev_err(dev, "[%s]: clk_enable() failed!",
                                        __func__);
                        regulator_disable(device_data->pwr_regulator);
                        goto out;
                }
                device_data->power_state = true;
        }

        if (device_data->restore_dev_ctx) {
                spin_lock(&device_data->ctx_lock);
                if (restore_device_context && device_data->current_ctx) {
                        device_data->restore_dev_ctx = false;
                        cryp_restore_device_context(device_data,
                                        &device_data->current_ctx->dev_ctx);
                }
                spin_unlock(&device_data->ctx_lock);
        }
out:
        spin_unlock(&device_data->power_state_spinlock);

        return ret;
}

static int hw_crypt_noxts(struct cryp_ctx *ctx,
                          struct cryp_device_data *device_data)
{
        int ret = 0;

        const u8 *indata = ctx->indata;
        u8 *outdata = ctx->outdata;
        u32 datalen = ctx->datalen;
        u32 outlen = datalen;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->outlen = ctx->datalen;

        if (unlikely(!IS_ALIGNED((u32)indata, 4))) {
                pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
                         "0x%08x", __func__, (u32)indata);
                return -EINVAL;
        }

        ret = cryp_setup_context(ctx, device_data);

        if (ret)
                goto out;

        if (cryp_mode == CRYP_MODE_INTERRUPT) {
                cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
                                    CRYP_IRQ_SRC_OUTPUT_FIFO);

                /*
                 * ctx->outlen is decremented in the cryp_interrupt_handler
                 * function. We had to add cpu_relax() (barrier) to make sure
                 * that gcc didn't optimize away this variable.
                 */
                while (ctx->outlen > 0)
                        cpu_relax();
        } else if (cryp_mode == CRYP_MODE_POLLING ||
                   cryp_mode == CRYP_MODE_DMA) {
                /*
                 * DMA mode lands here as well, because the separate DMA
                 * routines are only used when the cipher-/plaintext is
                 * larger than the blocksize; for a plain
                 * CRYPTO_ALG_TYPE_CIPHER request we stay with polling mode,
                 * since the overhead of setting up a DMA transfer would eat
                 * up the benefit of using it.
                 */
                cryp_polling_mode(ctx, device_data);
        } else {
                dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
                        __func__);
                ret = -EPERM;
                goto out;
        }

        cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
        ctx->updated = 1;

out:
        ctx->indata = indata;
        ctx->outdata = outdata;
        ctx->datalen = datalen;
        ctx->outlen = outlen;

        return ret;
}

static int get_nents(struct scatterlist *sg, int nbytes)
{
        int nents = 0;

        while (nbytes > 0) {
                nbytes -= sg->length;
                sg = sg_next(sg);
                nents++;
        }

        return nents;
}
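
/*
 * Note: get_nents() assumes the scatterlist actually covers nbytes;
 * sg_next() is not checked for NULL, so a list that is too short would
 * be a bug in the caller.
 */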

static int ablk_dma_crypt(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        struct cryp_device_data *device_data;

        int bytes_written = 0;
        int bytes_read = 0;
        int ret;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->datalen = areq->nbytes;
        ctx->outlen = areq->nbytes;

        ret = cryp_get_device_data(ctx, &device_data);
        if (ret)
                return ret;

        ret = cryp_setup_context(ctx, device_data);
        if (ret)
                goto out;

        /* We have the device now, so store the nents in the dma struct. */
        ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
        ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

        /* Enable DMA in- and output. */
        cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

        bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
        bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

        wait_for_completion(&ctx->device->dma.cryp_dma_complete);
        cryp_dma_done(ctx);

        cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
        ctx->updated = 1;

out:
        spin_lock(&device_data->ctx_lock);
        device_data->current_ctx = NULL;
        ctx->device = NULL;
        spin_unlock(&device_data->ctx_lock);

        /*
         * The down_interruptible part for this semaphore is called in
         * cryp_get_device_data.
         */
        up(&driver_data.device_allocation);

        if (unlikely(bytes_written != bytes_read))
                return -EPERM;

        return 0;
}

static int ablk_crypt(struct ablkcipher_request *areq)
{
        struct ablkcipher_walk walk;
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        struct cryp_device_data *device_data;
        unsigned long src_paddr;
        unsigned long dst_paddr;
        int ret;
        int nbytes;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ret = cryp_get_device_data(ctx, &device_data);
        if (ret)
                return ret;     /* device_data is not valid here */

        ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
        ret = ablkcipher_walk_phys(areq, &walk);

        if (ret) {
                pr_err(DEV_DBG_NAME " [%s]: ablkcipher_walk_phys() failed!",
                        __func__);
                goto out;
        }

        while ((nbytes = walk.nbytes) > 0) {
                ctx->iv = walk.iv;
                src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
                ctx->indata = phys_to_virt(src_paddr);

                dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
                ctx->outdata = phys_to_virt(dst_paddr);

                ctx->datalen = nbytes - (nbytes % ctx->blocksize);

                ret = hw_crypt_noxts(ctx, device_data);
                if (ret)
                        goto out;

                nbytes -= ctx->datalen;
                ret = ablkcipher_walk_done(areq, &walk, nbytes);
                if (ret)
                        goto out;
        }
        ablkcipher_walk_complete(&walk);

out:
        /* Release the device */
        spin_lock(&device_data->ctx_lock);
        device_data->current_ctx = NULL;
        ctx->device = NULL;
        spin_unlock(&device_data->ctx_lock);

        /*
         * The down_interruptible part for this semaphore is called in
         * cryp_get_device_data.
         */
        up(&driver_data.device_allocation);

        return ret;
}

static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 *flags = &cipher->base.crt_flags;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        switch (keylen) {
        case AES_KEYSIZE_128:
                ctx->config.keysize = CRYP_KEY_SIZE_128;
                break;

        case AES_KEYSIZE_192:
                ctx->config.keysize = CRYP_KEY_SIZE_192;
                break;

        case AES_KEYSIZE_256:
                ctx->config.keysize = CRYP_KEY_SIZE_256;
                break;

        default:
                pr_err(DEV_DBG_NAME " [%s]: Unknown keylen!", __func__);
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;

        return 0;
}

static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 *flags = &cipher->base.crt_flags;
        u32 tmp[DES_EXPKEY_WORDS];
        int ret;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        if (keylen != DES_KEY_SIZE) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
                                __func__);
                return -EINVAL;
        }

        ret = des_ekey(tmp, key);
        if (unlikely(ret == 0) &&
            (*flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_WEAK_KEY",
                         __func__);
                return -EINVAL;
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;
        return 0;
}

static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
                                  const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 flags;
        int err;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        flags = crypto_ablkcipher_get_flags(cipher);
        err = __des3_verify_key(&flags, key);
        if (unlikely(err)) {
                crypto_ablkcipher_set_flags(cipher, flags);
                return err;
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;
        return 0;
}

static int cryp_blk_encrypt(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

        /* DMA does not work for DES due to a hw bug */
        if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
                return ablk_dma_crypt(areq);

        /* For everything except DMA, we run the non-DMA version. */
        return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

        /* DMA does not work for DES due to a hw bug */
        if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
                return ablk_dma_crypt(areq);

        /* For everything except DMA, we run the non-DMA version. */
        return ablk_crypt(areq);
}

struct cryp_algo_template {
        enum cryp_algo_mode algomode;
        struct crypto_alg crypto;
};

static int cryp_cra_init(struct crypto_tfm *tfm)
{
        struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct cryp_algo_template *cryp_alg = container_of(alg,
                        struct cryp_algo_template,
                        crypto);

        ctx->config.algomode = cryp_alg->algomode;
        ctx->blocksize = crypto_tfm_alg_blocksize(tfm);

        return 0;
}

static struct cryp_algo_template cryp_algs[] = {
        {
                .algomode = CRYP_ALGO_AES_ECB,
                .crypto = {
                        .cra_name = "aes",
                        .cra_driver_name = "aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_AES_ECB,
                .crypto = {
                        .cra_name = "ecb(aes)",
                        .cra_driver_name = "ecb-aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_AES_CBC,
                .crypto = {
                        .cra_name = "cbc(aes)",
                        .cra_driver_name = "cbc-aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        .ivsize = AES_BLOCK_SIZE,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_AES_CTR,
                .crypto = {
                        .cra_name = "ctr(aes)",
                        .cra_driver_name = "ctr-aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        .ivsize = AES_BLOCK_SIZE,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_DES_ECB,
                .crypto = {
                        .cra_name = "ecb(des)",
                        .cra_driver_name = "ecb-des-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES_KEY_SIZE,
                                        .max_keysize = DES_KEY_SIZE,
                                        .setkey = des_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_ECB,
                .crypto = {
                        .cra_name = "ecb(des3_ede)",
                        .cra_driver_name = "ecb-des3_ede-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES3_EDE_KEY_SIZE,
                                        .max_keysize = DES3_EDE_KEY_SIZE,
                                        .setkey = des3_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_DES_CBC,
                .crypto = {
                        .cra_name = "cbc(des)",
                        .cra_driver_name = "cbc-des-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES_KEY_SIZE,
                                        .max_keysize = DES_KEY_SIZE,
                                        .setkey = des_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_CBC,
                .crypto = {
                        .cra_name = "cbc(des3_ede)",
                        .cra_driver_name = "cbc-des3_ede-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES3_EDE_KEY_SIZE,
                                        .max_keysize = DES3_EDE_KEY_SIZE,
                                        .setkey = des3_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        .ivsize = DES3_EDE_BLOCK_SIZE,
                                }
                        }
                }
        }
};

/**
 * cryp_algs_register_all - register all the supported algorithms
 */
static int cryp_algs_register_all(void)
{
        int ret;
        int i;
        int count;

        pr_debug("[%s]", __func__);

        for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
                ret = crypto_register_alg(&cryp_algs[i].crypto);
                if (ret) {
                        count = i;
                        pr_err("[%s] alg registration failed",
                                        cryp_algs[i].crypto.cra_driver_name);
                        goto unreg;
                }
        }
        return 0;
unreg:
        for (i = 0; i < count; i++)
                crypto_unregister_alg(&cryp_algs[i].crypto);
        return ret;
}

/**
 * cryp_algs_unregister_all - unregister all previously registered algorithms
 */
static void cryp_algs_unregister_all(void)
{
        int i;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
                crypto_unregister_alg(&cryp_algs[i].crypto);
}
1340
1341static int ux500_cryp_probe(struct platform_device *pdev)
1342{
1343        int ret;
1344        struct resource *res;
1345        struct resource *res_irq;
1346        struct cryp_device_data *device_data;
1347        struct cryp_protection_config prot = {
1348                .privilege_access = CRYP_STATE_ENABLE
1349        };
1350        struct device *dev = &pdev->dev;
1351
1352        dev_dbg(dev, "[%s]", __func__);
1353        device_data = devm_kzalloc(dev, sizeof(*device_data), GFP_ATOMIC);
1354        if (!device_data) {
1355                ret = -ENOMEM;
1356                goto out;
1357        }
1358
1359        device_data->dev = dev;
1360        device_data->current_ctx = NULL;
1361
1362        /* Grab the DMA configuration from platform data. */
1363        mem_to_engine = &((struct cryp_platform_data *)
1364                         dev->platform_data)->mem_to_engine;
1365        engine_to_mem = &((struct cryp_platform_data *)
1366                         dev->platform_data)->engine_to_mem;
1367
1368        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1369        if (!res) {
1370                dev_err(dev, "[%s]: platform_get_resource() failed",
1371                                __func__);
1372                ret = -ENODEV;
1373                goto out;
1374        }
1375
1376        device_data->phybase = res->start;
1377        device_data->base = devm_ioremap_resource(dev, res);
1378        if (IS_ERR(device_data->base)) {
1379                dev_err(dev, "[%s]: ioremap failed!", __func__);
1380                ret = PTR_ERR(device_data->base);
1381                goto out;
1382        }
1383
1384        spin_lock_init(&device_data->ctx_lock);
1385        spin_lock_init(&device_data->power_state_spinlock);
1386
1387        /* Enable power for CRYP hardware block */
1388        device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
1389        if (IS_ERR(device_data->pwr_regulator)) {
1390                dev_err(dev, "[%s]: could not get cryp regulator", __func__);
1391                ret = PTR_ERR(device_data->pwr_regulator);
1392                device_data->pwr_regulator = NULL;
1393                goto out;
1394        }
1395
1396        /* Enable the clk for CRYP hardware block */
1397        device_data->clk = devm_clk_get(&pdev->dev, NULL);
1398        if (IS_ERR(device_data->clk)) {
1399                dev_err(dev, "[%s]: clk_get() failed!", __func__);
1400                ret = PTR_ERR(device_data->clk);
1401                goto out_regulator;
1402        }
1403
1404        ret = clk_prepare(device_data->clk);
1405        if (ret) {
1406                dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
1407                goto out_regulator;
1408        }
1409
1410        /* Enable device power (and clock) */
1411        ret = cryp_enable_power(device_data->dev, device_data, false);
1412        if (ret) {
1413                dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
1414                goto out_clk_unprepare;
1415        }
1416
1417        if (cryp_check(device_data)) {
1418                dev_err(dev, "[%s]: cryp_check() failed!", __func__);
1419                ret = -EINVAL;
1420                goto out_power;
1421        }
1422
1423        if (cryp_configure_protection(device_data, &prot)) {
1424                dev_err(dev, "[%s]: cryp_configure_protection() failed!",
1425                        __func__);
1426                ret = -EINVAL;
1427                goto out_power;
1428        }

        res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
        if (!res_irq) {
                dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable\n",
                        __func__);
                ret = -ENODEV;
                goto out_power;
        }

        ret = devm_request_irq(&pdev->dev, res_irq->start,
                               cryp_interrupt_handler, 0, "cryp1", device_data);
        if (ret) {
                dev_err(dev, "[%s]: Unable to request IRQ\n", __func__);
                goto out_power;
        }

        if (cryp_mode == CRYP_MODE_DMA)
                cryp_dma_setup_channel(device_data, dev);

        platform_set_drvdata(pdev, device_data);

        /* Put the new device into the device list... */
        klist_add_tail(&device_data->list_node, &driver_data.device_list);

        /* ... and signal that a new device is available. */
        up(&driver_data.device_allocation);

        atomic_set(&session_id, 1);
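
        /*
         * driver_data.device_allocation is a counting semaphore used as a
         * device pool: it is initialized to 0 in ux500_cryp_mod_init() and
         * each up() above adds one allocatable device.  Starting session_id
         * at 1 presumably keeps 0 free as a "no session yet" marker, so a
         * fresh context always triggers a hardware reconfiguration.
         */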

        ret = cryp_algs_register_all();
        if (ret) {
                dev_err(dev, "[%s]: cryp_algs_register_all() failed!\n",
                        __func__);
                /* Take the device back out of the pool and off the list. */
                down(&driver_data.device_allocation);
                klist_remove(&device_data->list_node);
                goto out_power;
        }

        dev_info(dev, "successfully registered\n");

        return 0;

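/* The error labels below unwind strictly in reverse order of acquisition. */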
out_power:
        cryp_disable_power(device_data->dev, device_data, false);

out_clk_unprepare:
        clk_unprepare(device_data->clk);

out_regulator:
        regulator_put(device_data->pwr_regulator);

out:
        return ret;
}
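
/*
 * Usage sketch (illustrative only, not part of this driver): once
 * cryp_algs_register_all() has published the algorithms, a kernel client
 * reaches the engine through the generic crypto API rather than through
 * this file, e.g.:
 *
 *      struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *      if (!IS_ERR(tfm)) {
 *              crypto_cipher_setkey(tfm, key, AES_KEYSIZE_128);
 *              crypto_cipher_encrypt_one(tfm, dst, src);
 *              crypto_free_cipher(tfm);
 *      }
 *
 * Whether such a request actually lands on this hardware rather than on a
 * software implementation depends on the types and priorities of the
 * algorithms this driver registers.
 */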

static int ux500_cryp_remove(struct platform_device *pdev)
{
        struct cryp_device_data *device_data;

        dev_dbg(&pdev->dev, "[%s]\n", __func__);
        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!\n",
                        __func__);
                return -ENODEV;
        }

        /*
         * Try to take this device out of the allocation pool; if every
         * device is currently handed out, refuse the removal.
         */
        if (down_trylock(&driver_data.device_allocation))
                return -EBUSY;

        /* Check that the device is free */
        spin_lock(&device_data->ctx_lock);
        /* current_ctx allocates a device, NULL = unallocated */
        if (device_data->current_ctx) {
                /* The device is busy */
                spin_unlock(&device_data->ctx_lock);
                /* Return the device to the pool. */
                up(&driver_data.device_allocation);
                return -EBUSY;
        }

        spin_unlock(&device_data->ctx_lock);

        /* Remove the device from the list */
        if (klist_node_attached(&device_data->list_node))
                klist_remove(&device_data->list_node);

        /* If this was the last device, remove the services */
        if (list_empty(&driver_data.device_list.k_list))
                cryp_algs_unregister_all();

        if (cryp_disable_power(&pdev->dev, device_data, false))
                dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed\n",
                        __func__);

        clk_unprepare(device_data->clk);
        regulator_put(device_data->pwr_regulator);

        return 0;
}

static void ux500_cryp_shutdown(struct platform_device *pdev)
{
        struct cryp_device_data *device_data;

        dev_dbg(&pdev->dev, "[%s]\n", __func__);

        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!\n",
                        __func__);
                return;
        }

        /* Check that the device is free */
        spin_lock(&device_data->ctx_lock);
        /* current_ctx allocates a device, NULL = unallocated */
        if (!device_data->current_ctx) {
                if (down_trylock(&driver_data.device_allocation))
                        dev_dbg(&pdev->dev, "[%s]: Cryp still in use! Shutting down anyway...\n",
                                __func__);
                /*
                 * "Allocate" the device by setting current_ctx to a dummy
                 * non-NULL value, so that no context can grab it while we
                 * are shutting down.
                 */
                device_data->current_ctx++;
        }
        spin_unlock(&device_data->ctx_lock);

        /* Remove the device from the list */
        if (klist_node_attached(&device_data->list_node))
                klist_remove(&device_data->list_node);

        /* If this was the last device, remove the services */
        if (list_empty(&driver_data.device_list.k_list))
                cryp_algs_unregister_all();

        if (cryp_disable_power(&pdev->dev, device_data, false))
                dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed\n",
                        __func__);
}

#ifdef CONFIG_PM_SLEEP
static int ux500_cryp_suspend(struct device *dev)
{
        int ret;
        struct platform_device *pdev = to_platform_device(dev);
        struct cryp_device_data *device_data;
        struct resource *res_irq;
        struct cryp_ctx *temp_ctx = NULL;

        dev_dbg(dev, "[%s]\n", __func__);

        /* Handle state? */
        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(dev, "[%s]: platform_get_drvdata() failed!\n", __func__);
                return -ENODEV;
        }

        res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
        if (!res_irq)
                dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable\n", __func__);
        else
                disable_irq(res_irq->start);

        spin_lock(&device_data->ctx_lock);
        if (!device_data->current_ctx)
                device_data->current_ctx++;
        spin_unlock(&device_data->ctx_lock);

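        /*
         * temp_ctx started out as NULL, so ++temp_ctx yields the same
         * "one past NULL" dummy pointer that current_ctx++ produced above
         * for an idle device.  A match therefore means suspend claimed the
         * device itself: take it out of the allocation pool and power it
         * down completely.  Otherwise a real context still owns it, and the
         * final "true" argument presumably asks cryp_disable_power() to
         * preserve the hardware context across the suspend.
         */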
        if (device_data->current_ctx == ++temp_ctx) {
                if (down_interruptible(&driver_data.device_allocation))
                        dev_dbg(dev, "[%s]: down_interruptible() failed\n",
                                __func__);
                ret = cryp_disable_power(dev, device_data, false);
        } else {
                ret = cryp_disable_power(dev, device_data, true);
        }

        if (ret)
                dev_err(dev, "[%s]: cryp_disable_power() failed\n", __func__);

        return ret;
}

static int ux500_cryp_resume(struct device *dev)
{
        int ret = 0;
        struct platform_device *pdev = to_platform_device(dev);
        struct cryp_device_data *device_data;
        struct resource *res_irq;
        struct cryp_ctx *temp_ctx = NULL;

        dev_dbg(dev, "[%s]\n", __func__);

        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(dev, "[%s]: platform_get_drvdata() failed!\n", __func__);
                return -ENODEV;
        }

        spin_lock(&device_data->ctx_lock);
        if (device_data->current_ctx == ++temp_ctx)
                device_data->current_ctx = NULL;
        spin_unlock(&device_data->ctx_lock);

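        /*
         * Mirror of the trick in ux500_cryp_suspend(): ++temp_ctx on a NULL
         * pointer rebuilds the same "one past NULL" dummy value, so the
         * check above just cleared current_ctx if suspend had claimed an
         * idle device.  In that case the device goes back into the pool
         * below; otherwise a real context owns it and its power (and,
         * presumably, its saved hardware state) must be brought back.
         */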
        if (!device_data->current_ctx)
                up(&driver_data.device_allocation);
        else
                ret = cryp_enable_power(dev, device_data, true);

        if (ret) {
                dev_err(dev, "[%s]: cryp_enable_power() failed!\n", __func__);
        } else {
                res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
                if (res_irq)
                        enable_irq(res_irq->start);
        }

        return ret;
}
#endif

static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);

static const struct of_device_id ux500_cryp_match[] = {
        { .compatible = "stericsson,ux500-cryp" },
        { },
};
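/*
 * Exporting the OF match table lets userspace (udev/modprobe) autoload this
 * module when a matching "stericsson,ux500-cryp" node is present in the
 * device tree.
 */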
MODULE_DEVICE_TABLE(of, ux500_cryp_match);

static struct platform_driver cryp_driver = {
        .probe  = ux500_cryp_probe,
        .remove = ux500_cryp_remove,
        .shutdown = ux500_cryp_shutdown,
        .driver = {
                .name  = "cryp1",
                .of_match_table = ux500_cryp_match,
                .pm    = &ux500_cryp_pm,
        }
};

static int __init ux500_cryp_mod_init(void)
{
        pr_debug("[%s] is called!\n", __func__);
        klist_init(&driver_data.device_list, NULL, NULL);
        /* Initialize the semaphore to 0 devices (locked state) */
        sema_init(&driver_data.device_allocation, 0);
        return platform_driver_register(&cryp_driver);
}

static void __exit ux500_cryp_mod_fini(void)
{
        pr_debug("[%s] is called!\n", __func__);
        platform_driver_unregister(&cryp_driver);
}

module_init(ux500_cryp_mod_init);
module_exit(ux500_cryp_mod_fini);

module_param(cryp_mode, int, 0);
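
/*
 * A MODULE_PARM_DESC would make the parameter self-documenting.  A minimal
 * sketch, assuming cryp_mode selects the transfer mechanism checked against
 * CRYP_MODE_DMA in probe (the exact numeric values live in the private
 * headers and are an assumption here):
 *
 *      MODULE_PARM_DESC(cryp_mode, "Transfer mode: interrupt-driven or DMA");
 */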

MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
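/*
 * These aliases let the crypto core autoload this module when the "aes-all"
 * or "des-all" algorithm names are requested.
 */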
MODULE_ALIAS_CRYPTO("aes-all");
MODULE_ALIAS_CRYPTO("des-all");

MODULE_LICENSE("GPL");