linux/drivers/crypto/ux500/cryp/cryp_core.c
/**
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
 * License terms: GNU General Public License (GPL) version 2
 */
  11
  12#include <linux/clk.h>
  13#include <linux/completion.h>
  14#include <linux/crypto.h>
  15#include <linux/dmaengine.h>
  16#include <linux/err.h>
  17#include <linux/errno.h>
  18#include <linux/interrupt.h>
  19#include <linux/io.h>
  20#include <linux/irqreturn.h>
  21#include <linux/klist.h>
  22#include <linux/module.h>
  23#include <linux/mod_devicetable.h>
  24#include <linux/platform_device.h>
  25#include <linux/regulator/consumer.h>
  26#include <linux/semaphore.h>
  27#include <linux/platform_data/dma-ste-dma40.h>
  28
  29#include <crypto/aes.h>
  30#include <crypto/algapi.h>
  31#include <crypto/ctr.h>
  32#include <crypto/des.h>
  33#include <crypto/scatterwalk.h>
  34
  35#include <linux/platform_data/crypto-ux500.h>
  36
  37#include "cryp_p.h"
  38#include "cryp.h"
  39
  40#define CRYP_MAX_KEY_SIZE       32
  41#define BYTES_PER_WORD          4
  42
  43static int cryp_mode;
  44static atomic_t session_id;
  45
  46static struct stedma40_chan_cfg *mem_to_engine;
  47static struct stedma40_chan_cfg *engine_to_mem;

/**
 * struct cryp_driver_data - data specific to the driver.
 *
 * @device_list: A list of registered devices to choose from.
 * @device_allocation: A semaphore initialized with number of devices.
 */
struct cryp_driver_data {
        struct klist device_list;
        struct semaphore device_allocation;
};

/**
 * struct cryp_ctx - Crypto context
 * @config: Crypto mode.
 * @key: Key buffer (up to CRYP_MAX_KEY_SIZE bytes).
 * @keylen: Length of key.
 * @iv: Pointer to initialization vector.
 * @indata: Pointer to indata.
 * @outdata: Pointer to outdata.
 * @datalen: Length of indata.
 * @outlen: Length of outdata.
 * @blocksize: Size of blocks.
 * @updated: Updated flag.
 * @dev_ctx: Device dependent context.
 * @device: Pointer to the device.
 * @session_id: Session identifier; 0 is reserved for "not yet configured".
 */
struct cryp_ctx {
        struct cryp_config config;
        u8 key[CRYP_MAX_KEY_SIZE];
        u32 keylen;
        u8 *iv;
        const u8 *indata;
        u8 *outdata;
        u32 datalen;
        u32 outlen;
        u32 blocksize;
        u8 updated;
        struct cryp_device_context dev_ctx;
        struct cryp_device_data *device;
        u32 session_id;
};

static struct cryp_driver_data driver_data;

/**
 * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
 * @in: Data to convert.
 */
static inline u32 uint8p_to_uint32_be(u8 *in)
{
        u32 *data = (u32 *)in;

        return cpu_to_be32p(data);
}
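
/*
 * Illustrative example (not part of the driver flow, values chosen
 * arbitrarily): on a little-endian CPU the byte stream
 * {0x12, 0x34, 0x56, 0x78} is read back as the big-endian word 0x12345678:
 *
 *      u8 bytes[4] = { 0x12, 0x34, 0x56, 0x78 };
 *      u32 word = uint8p_to_uint32_be(bytes);  -> word == 0x12345678
 */
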
/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b includes bits 0-7. n1 takes the high nibble (bits 4-7) and
 *  mirrors it down into bits 0-3; n2 takes the low nibble (bits 0-3)
 *  and mirrors it up into bits 4-7.
 *
 *  Nibble 1 (n1):
 *  (The "old" (moved) bit is replaced with a zero)
 *  1. Move bits 6 and 7, 4 positions to the right.
 *  2. Move bits 3 and 5, 2 positions to the right.
 *  3. Move bits 1-4, 1 position to the right.
 *
 *  Nibble 2 (n2):
 *  1. Move bits 0 and 1, 4 positions to the left.
 *  2. Move bits 2 and 4, 2 positions to the left.
 *  3. Move bits 3-6, 1 position to the left.
 *
 *  Combine the two nibbles to a complete and swapped byte.
 */
static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
                                  right shift 2 */
#define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
                                  right shift 1 */
#define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
                                  left shift 2 */
#define L_SHIFT_1_MASK  0x78 /* (After left shift 2) Bits 3-6,
                                  left shift 1 */

        u8 n1;
        u8 n2;

        /* Swap most significant nibble */
        /* Right shift 4, bits 6 and 7 */
        n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
        /* Right shift 2, bits 3 and 5 */
        n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
        /* Right shift 1, bits 1-4 */
        n1 = (n1  & R_SHIFT_1_MASK) >> 1;

        /* Swap least significant nibble */
        /* Left shift 4, bits 0 and 1 */
        n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
        /* Left shift 2, bits 2 and 4 */
        n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
        /* Left shift 1, bits 3-6 */
        n2 = (n2  & L_SHIFT_1_MASK) << 1;

        return n1 | n2;
}
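
/*
 * Illustrative example: mirroring pairs bit 0 with bit 7, bit 1 with
 * bit 6, and so on. For b = 0x25 (0b00100101, bits 0, 2 and 5 set) the
 * mirrored byte has bits 7, 5 and 2 set:
 *
 *      swap_bits_in_byte(0x25) == 0xa4         (0b10100100)
 */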

static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
                                                      u8 *out, u32 len)
{
        unsigned int i = 0;
        int j;
        int index = 0;

        j = len - BYTES_PER_WORD;
        while (j >= 0) {
                for (i = 0; i < BYTES_PER_WORD; i++) {
                        index = len - j - BYTES_PER_WORD + i;
                        out[j + i] = swap_bits_in_byte(in[index]);
                }
                j -= BYTES_PER_WORD;
        }
}
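
/*
 * Illustrative example: for an 8-byte key {k0 .. k7} the 32-bit words are
 * written out in reverse order, the bytes inside each word keep their
 * positions, and every byte is bit-mirrored:
 *
 *      in:  k0 k1 k2 k3 | k4 k5 k6 k7
 *      out: m(k4) m(k5) m(k6) m(k7) | m(k0) m(k1) m(k2) m(k3)
 *
 * where m() is swap_bits_in_byte(). This is the key layout cfg_keys()
 * below feeds to the hardware for the AES modes.
 */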

static void add_session_id(struct cryp_ctx *ctx)
{
        /*
         * We never want 0 to be a valid value, since this is the default value
         * for the software context.
         */
        if (unlikely(atomic_inc_and_test(&session_id)))
                atomic_inc(&session_id);

        ctx->session_id = atomic_read(&session_id);
}
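
/*
 * Worked example of the wrap-around above: if session_id is 0xffffffff,
 * atomic_inc_and_test() wraps it to 0 and returns true, so the extra
 * atomic_inc() bumps it to 1 and no context ever observes the reserved
 * value 0.
 */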

static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
        struct cryp_ctx *ctx;
        int count;
        struct cryp_device_data *device_data;

        if (param == NULL) {
                BUG_ON(!param);
                return IRQ_HANDLED;
        }

        /* The device is coming from the one found in hw_crypt_noxts. */
        device_data = (struct cryp_device_data *)param;

        ctx = device_data->current_ctx;

        if (ctx == NULL) {
                BUG_ON(!ctx);
                return IRQ_HANDLED;
        }

        dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
                cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
                "out" : "in");

        if (cryp_pending_irq_src(device_data,
                                 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
                if (ctx->outlen / ctx->blocksize > 0) {
                        /*
                         * count is in 32-bit words while outdata and outlen
                         * are byte based, so advance them by count * 4.
                         */
                        count = ctx->blocksize / 4;

                        readsl(&device_data->base->dout, ctx->outdata, count);
                        ctx->outdata += count * BYTES_PER_WORD;
                        ctx->outlen -= count * BYTES_PER_WORD;

                        if (ctx->outlen == 0) {
                                cryp_disable_irq_src(device_data,
                                                     CRYP_IRQ_SRC_OUTPUT_FIFO);
                        }
                }
        } else if (cryp_pending_irq_src(device_data,
                                        CRYP_IRQ_SRC_INPUT_FIFO)) {
                if (ctx->datalen / ctx->blocksize > 0) {
                        count = ctx->blocksize / 4;

                        writesl(&device_data->base->din, ctx->indata, count);

                        ctx->indata += count * BYTES_PER_WORD;
                        ctx->datalen -= count * BYTES_PER_WORD;

                        if (ctx->datalen == 0)
                                cryp_disable_irq_src(device_data,
                                                   CRYP_IRQ_SRC_INPUT_FIFO);

                        if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
                                CRYP_PUT_BITS(&device_data->base->cr,
                                              CRYP_START_ENABLE,
                                              CRYP_CR_START_POS,
                                              CRYP_CR_START_MASK);

                                cryp_wait_until_done(device_data);
                        }
                }
        }

        return IRQ_HANDLED;
}

static int mode_is_aes(enum cryp_algo_mode mode)
{
        return  CRYP_ALGO_AES_ECB == mode ||
                CRYP_ALGO_AES_CBC == mode ||
                CRYP_ALGO_AES_CTR == mode ||
                CRYP_ALGO_AES_XTS == mode;
}

static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
                  enum cryp_init_vector_index index)
{
        struct cryp_init_vector_value vector_value;

        dev_dbg(device_data->dev, "[%s]", __func__);

        vector_value.init_value_left = left;
        vector_value.init_value_right = right;

        return cryp_configure_init_vector(device_data,
                                          index,
                                          vector_value);
}

static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
        int i;
        int status = 0;
        int num_of_regs = ctx->blocksize / 8;
        u32 iv[AES_BLOCK_SIZE / 4];

        dev_dbg(device_data->dev, "[%s]", __func__);

        /*
         * Since we loop on num_of_regs we need a check in case someone
         * provides an incorrect blocksize, which would make the loop below
         * call cfg_iv() with an index greater than 1; only IV register
         * pairs 0 and 1 exist.
         */
        if (num_of_regs > 2) {
                dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
                        __func__, ctx->blocksize);
                return -EINVAL;
        }

        for (i = 0; i < ctx->blocksize / 4; i++)
                iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);

        for (i = 0; i < num_of_regs; i++) {
                status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
                                (enum cryp_init_vector_index) i);
                if (status != 0)
                        return status;
        }
        return status;
}
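
/*
 * Worked example: for the AES modes (blocksize 16) num_of_regs is 2, so
 * iv[0..3] are written as the register pairs (iv[0], iv[1]) and
 * (iv[2], iv[3]); for DES/3DES (blocksize 8) only the single pair
 * (iv[0], iv[1]) is written.
 */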

static int set_key(struct cryp_device_data *device_data,
                   u32 left_key,
                   u32 right_key,
                   enum cryp_key_reg_index index)
{
        struct cryp_key_value key_value;
        int cryp_error;

        dev_dbg(device_data->dev, "[%s]", __func__);

        key_value.key_value_left = left_key;
        key_value.key_value_right = right_key;

        cryp_error = cryp_configure_key_values(device_data,
                                               index,
                                               key_value);
        if (cryp_error != 0)
                dev_err(device_data->dev, "[%s]: "
                        "cryp_configure_key_values() failed!", __func__);

        return cryp_error;
}

static int cfg_keys(struct cryp_ctx *ctx)
{
        int i;
        int num_of_regs = ctx->keylen / 8;
        u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
        int cryp_error = 0;

        dev_dbg(ctx->device->dev, "[%s]", __func__);

        if (mode_is_aes(ctx->config.algomode)) {
                swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
                                                   (u8 *)swapped_key,
                                                   ctx->keylen);
        } else {
                for (i = 0; i < ctx->keylen / 4; i++)
                        swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
        }

        for (i = 0; i < num_of_regs; i++) {
                cryp_error = set_key(ctx->device,
                                     swapped_key[i * 2],
                                     swapped_key[i * 2 + 1],
                                     (enum cryp_key_reg_index) i);

                if (cryp_error != 0) {
                        dev_err(ctx->device->dev, "[%s]: set_key() failed!",
                                        __func__);
                        return cryp_error;
                }
        }
        return cryp_error;
}
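
/*
 * Worked example: an AES-256 key (keylen 32) occupies num_of_regs = 4
 * register pairs, i.e. set_key() is called with (swapped_key[0],
 * swapped_key[1]) up to (swapped_key[6], swapped_key[7]); a DES key
 * (keylen 8) needs a single pair.
 */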

static int cryp_setup_context(struct cryp_ctx *ctx,
                              struct cryp_device_data *device_data)
{
        u32 control_register = CRYP_CR_DEFAULT;

        switch (cryp_mode) {
        case CRYP_MODE_INTERRUPT:
                writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
                break;

        case CRYP_MODE_DMA:
                writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
                break;

        default:
                break;
        }

        if (ctx->updated == 0) {
                cryp_flush_inoutfifo(device_data);
                if (cfg_keys(ctx) != 0) {
                        dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
                                __func__);
                        return -EINVAL;
                }

                if (ctx->iv &&
                    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
                    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
                    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
                        if (cfg_ivs(device_data, ctx) != 0)
                                return -EPERM;
                }

                cryp_set_configuration(device_data, &ctx->config,
                                       &control_register);
                add_session_id(ctx);
        } else if (ctx->updated == 1 &&
                   ctx->session_id != atomic_read(&session_id)) {
                cryp_flush_inoutfifo(device_data);
                cryp_restore_device_context(device_data, &ctx->dev_ctx);

                add_session_id(ctx);
                control_register = ctx->dev_ctx.cr;
        } else {
                control_register = ctx->dev_ctx.cr;
        }

        writel(control_register |
               (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
               &device_data->base->cr);

        return 0;
}

static int cryp_get_device_data(struct cryp_ctx *ctx,
                                struct cryp_device_data **device_data)
{
        int ret;
        struct klist_iter device_iterator;
        struct klist_node *device_node;
        struct cryp_device_data *local_device_data = NULL;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        /* Wait until a device is available */
        ret = down_interruptible(&driver_data.device_allocation);
        if (ret)
                return ret;  /* Interrupted */

        /* Select a device */
        klist_iter_init(&driver_data.device_list, &device_iterator);

        device_node = klist_next(&device_iterator);
        while (device_node) {
                local_device_data = container_of(device_node,
                                           struct cryp_device_data, list_node);
                spin_lock(&local_device_data->ctx_lock);
                /* current_ctx allocates a device, NULL = unallocated */
                if (local_device_data->current_ctx) {
                        device_node = klist_next(&device_iterator);
                } else {
                        local_device_data->current_ctx = ctx;
                        ctx->device = local_device_data;
                        spin_unlock(&local_device_data->ctx_lock);
                        break;
                }
                spin_unlock(&local_device_data->ctx_lock);
        }
        klist_iter_exit(&device_iterator);

        if (!device_node) {
                /*
                 * No free device found.
                 * Since we allocated a device with down_interruptible, this
                 * should not be able to happen.
                 * Number of available devices, which are contained in
                 * device_allocation, is therefore decremented by not doing
                 * an up(device_allocation).
                 */
                return -EBUSY;
        }

        *device_data = local_device_data;

        return 0;
}

static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
                                   struct device *dev)
{
        struct dma_slave_config mem2cryp = {
                .direction = DMA_MEM_TO_DEV,
                .dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .dst_maxburst = 4,
        };
        struct dma_slave_config cryp2mem = {
                .direction = DMA_DEV_TO_MEM,
                .src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
                .src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .src_maxburst = 4,
        };

        dma_cap_zero(device_data->dma.mask);
        dma_cap_set(DMA_SLAVE, device_data->dma.mask);

        device_data->dma.cfg_mem2cryp = mem_to_engine;
        device_data->dma.chan_mem2cryp =
                dma_request_channel(device_data->dma.mask,
                                    stedma40_filter,
                                    device_data->dma.cfg_mem2cryp);

        device_data->dma.cfg_cryp2mem = engine_to_mem;
        device_data->dma.chan_cryp2mem =
                dma_request_channel(device_data->dma.mask,
                                    stedma40_filter,
                                    device_data->dma.cfg_cryp2mem);

        dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
        dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);

        init_completion(&device_data->dma.cryp_dma_complete);
}

static void cryp_dma_out_callback(void *data)
{
        struct cryp_ctx *ctx = (struct cryp_ctx *) data;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        complete(&ctx->device->dma.cryp_dma_complete);
}

static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
                                 struct scatterlist *sg,
                                 int len,
                                 enum dma_data_direction direction)
{
        struct dma_async_tx_descriptor *desc;
        struct dma_chan *channel = NULL;
        dma_cookie_t cookie;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        if (unlikely(!IS_ALIGNED((u32)sg, 4))) {
                dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
                        "aligned! Addr: 0x%08x", __func__, (u32)sg);
                return -EFAULT;
        }

        switch (direction) {
        case DMA_TO_DEVICE:
                channel = ctx->device->dma.chan_mem2cryp;
                ctx->device->dma.sg_src = sg;
                ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
                                                 ctx->device->dma.sg_src,
                                                 ctx->device->dma.nents_src,
                                                 direction);

                if (!ctx->device->dma.sg_src_len) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not map the sg list (TO_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                        "(TO_DEVICE)", __func__);

                desc = dmaengine_prep_slave_sg(channel,
                                ctx->device->dma.sg_src,
                                ctx->device->dma.sg_src_len,
                                direction, DMA_CTRL_ACK);
                break;

        case DMA_FROM_DEVICE:
                channel = ctx->device->dma.chan_cryp2mem;
                ctx->device->dma.sg_dst = sg;
                ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
                                                 ctx->device->dma.sg_dst,
                                                 ctx->device->dma.nents_dst,
                                                 direction);

                if (!ctx->device->dma.sg_dst_len) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not map the sg list (FROM_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                        "(FROM_DEVICE)", __func__);

                desc = dmaengine_prep_slave_sg(channel,
                                ctx->device->dma.sg_dst,
                                ctx->device->dma.sg_dst_len,
                                direction,
                                DMA_CTRL_ACK |
                                DMA_PREP_INTERRUPT);

                if (desc) {
                        desc->callback = cryp_dma_out_callback;
                        desc->callback_param = ctx;
                }
                break;

        default:
                dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
                        __func__);
                return -EFAULT;
        }

        /* dmaengine_prep_slave_sg() returns NULL on failure. */
        if (!desc) {
                dev_dbg(ctx->device->dev,
                        "[%s]: Could not prepare the DMA descriptor",
                        __func__);
                return -EFAULT;
        }

        cookie = dmaengine_submit(desc);
        dma_async_issue_pending(channel);

        return 0;
}
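
/*
 * The expected call pattern around cryp_set_dma_transfer() is: map and
 * submit the source sg list (DMA_TO_DEVICE), then the destination sg list
 * (DMA_FROM_DEVICE, which also arms cryp_dma_out_callback()), and finally
 * wait for dma.cryp_dma_complete before tearing the mappings down in
 * cryp_dma_done(). See ablk_dma_crypt() below for the actual sequence.
 */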

static void cryp_dma_done(struct cryp_ctx *ctx)
{
        struct dma_chan *chan;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        chan = ctx->device->dma.chan_mem2cryp;
        dmaengine_terminate_all(chan);
        dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
                     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);

        chan = ctx->device->dma.chan_cryp2mem;
        dmaengine_terminate_all(chan);
        dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
                     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
}

static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
                          int len)
{
        int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        if (error) {
                dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
                        "failed", __func__);
                return error;
        }

        return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
        int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);

        if (error) {
                dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
                        "failed", __func__);
                return error;
        }

        return len;
}

static void cryp_polling_mode(struct cryp_ctx *ctx,
                              struct cryp_device_data *device_data)
{
        int len = ctx->blocksize / BYTES_PER_WORD;
        int remaining_length = ctx->datalen;
        u32 *indata = (u32 *)ctx->indata;
        u32 *outdata = (u32 *)ctx->outdata;

        while (remaining_length > 0) {
                writesl(&device_data->base->din, indata, len);
                indata += len;
                remaining_length -= (len * BYTES_PER_WORD);
                cryp_wait_until_done(device_data);

                readsl(&device_data->base->dout, outdata, len);
                outdata += len;
                cryp_wait_until_done(device_data);
        }
}
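
/*
 * Worked example: for one AES block (blocksize 16) len is 4, so each loop
 * iteration pushes four 32-bit words into the DIN FIFO, waits for the
 * engine, then pulls four words back out of DOUT, consuming 16 bytes of
 * remaining_length per block. indata/outdata are u32 pointers, so += len
 * also advances exactly one block.
 */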

static int cryp_disable_power(struct device *dev,
                              struct cryp_device_data *device_data,
                              bool save_device_context)
{
        int ret = 0;

        dev_dbg(dev, "[%s]", __func__);

        spin_lock(&device_data->power_state_spinlock);
        if (!device_data->power_state)
                goto out;

        spin_lock(&device_data->ctx_lock);
        if (save_device_context && device_data->current_ctx) {
                cryp_save_device_context(device_data,
                                &device_data->current_ctx->dev_ctx,
                                cryp_mode);
                device_data->restore_dev_ctx = true;
        }
        spin_unlock(&device_data->ctx_lock);

        clk_disable(device_data->clk);
        ret = regulator_disable(device_data->pwr_regulator);
        if (ret)
                dev_err(dev, "[%s]: "
                                "regulator_disable() failed!",
                                __func__);

        device_data->power_state = false;

out:
        spin_unlock(&device_data->power_state_spinlock);

        return ret;
}

static int cryp_enable_power(
                struct device *dev,
                struct cryp_device_data *device_data,
                bool restore_device_context)
{
        int ret = 0;

        dev_dbg(dev, "[%s]", __func__);

        spin_lock(&device_data->power_state_spinlock);
        if (!device_data->power_state) {
                ret = regulator_enable(device_data->pwr_regulator);
                if (ret) {
                        dev_err(dev, "[%s]: regulator_enable() failed!",
                                        __func__);
                        goto out;
                }

                ret = clk_enable(device_data->clk);
                if (ret) {
                        dev_err(dev, "[%s]: clk_enable() failed!",
                                        __func__);
                        regulator_disable(device_data->pwr_regulator);
                        goto out;
                }
                device_data->power_state = true;
        }

        if (device_data->restore_dev_ctx) {
                spin_lock(&device_data->ctx_lock);
                if (restore_device_context && device_data->current_ctx) {
                        device_data->restore_dev_ctx = false;
                        cryp_restore_device_context(device_data,
                                        &device_data->current_ctx->dev_ctx);
                }
                spin_unlock(&device_data->ctx_lock);
        }
out:
        spin_unlock(&device_data->power_state_spinlock);

        return ret;
}

static int hw_crypt_noxts(struct cryp_ctx *ctx,
                          struct cryp_device_data *device_data)
{
        int ret = 0;

        const u8 *indata = ctx->indata;
        u8 *outdata = ctx->outdata;
        u32 datalen = ctx->datalen;
        u32 outlen = datalen;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->outlen = ctx->datalen;

        if (unlikely(!IS_ALIGNED((u32)indata, 4))) {
                pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
                         "0x%08x", __func__, (u32)indata);
                return -EINVAL;
        }

        ret = cryp_setup_context(ctx, device_data);

        if (ret)
                goto out;

        if (cryp_mode == CRYP_MODE_INTERRUPT) {
                cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
                                    CRYP_IRQ_SRC_OUTPUT_FIFO);

                /*
                 * ctx->outlen is decremented in cryp_interrupt_handler. The
                 * cpu_relax() acts as a barrier so that gcc cannot optimize
                 * this busy-wait on the variable away.
                 */
                while (ctx->outlen > 0)
                        cpu_relax();
        } else if (cryp_mode == CRYP_MODE_POLLING ||
                   cryp_mode == CRYP_MODE_DMA) {
                /*
                 * DMA ends up in this branch too: in CRYP_MODE_DMA the
                 * separate DMA routines handle cipher-/plaintext larger than
                 * one block, but plain CRYPTO_ALG_TYPE_CIPHER requests still
                 * use polling mode here, since the overhead of the DMA setup
                 * eats up its benefits for such small transfers.
                 */
                cryp_polling_mode(ctx, device_data);
        } else {
                dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
                        __func__);
                ret = -EPERM;
                goto out;
        }

        cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
        ctx->updated = 1;

out:
        ctx->indata = indata;
        ctx->outdata = outdata;
        ctx->datalen = datalen;
        ctx->outlen = outlen;

        return ret;
}

static int get_nents(struct scatterlist *sg, int nbytes)
{
        int nents = 0;

        while (nbytes > 0) {
                nbytes -= sg->length;
                sg = sg_next(sg);
                nents++;
        }

        return nents;
}
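
/*
 * Illustrative example: for a 64-byte request spread over sg entries of
 * 16, 16 and 32 bytes, get_nents() returns 3. It assumes the list really
 * covers nbytes; it does not guard against walking off the end of a list
 * that is too short.
 */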

static int ablk_dma_crypt(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        struct cryp_device_data *device_data;

        int bytes_written = 0;
        int bytes_read = 0;
        int ret;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->datalen = areq->nbytes;
        ctx->outlen = areq->nbytes;

        ret = cryp_get_device_data(ctx, &device_data);
        if (ret)
                return ret;

        ret = cryp_setup_context(ctx, device_data);
        if (ret)
                goto out;

        /* We have the device now, so store the nents in the dma struct. */
        ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
        ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

        /* Enable DMA in- and output. */
        cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

        bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
        bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

        wait_for_completion(&ctx->device->dma.cryp_dma_complete);
        cryp_dma_done(ctx);

        cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
        ctx->updated = 1;

out:
        spin_lock(&device_data->ctx_lock);
        device_data->current_ctx = NULL;
        ctx->device = NULL;
        spin_unlock(&device_data->ctx_lock);

        /*
         * The down_interruptible part for this semaphore is called in
         * cryp_get_device_data.
         */
        up(&driver_data.device_allocation);

        if (unlikely(bytes_written != bytes_read))
                return -EPERM;

        return 0;
}

static int ablk_crypt(struct ablkcipher_request *areq)
{
        struct ablkcipher_walk walk;
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        struct cryp_device_data *device_data;
        unsigned long src_paddr;
        unsigned long dst_paddr;
        int ret;
        int nbytes;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ret = cryp_get_device_data(ctx, &device_data);
        if (ret)
                /*
                 * No device was allocated, so device_data is not valid and
                 * the release path at the out label must not run.
                 */
                return ret;

        ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
        ret = ablkcipher_walk_phys(areq, &walk);

        if (ret) {
                pr_err(DEV_DBG_NAME "[%s]: ablkcipher_walk_phys() failed!",
                        __func__);
                goto out;
        }

        while ((nbytes = walk.nbytes) > 0) {
                ctx->iv = walk.iv;
                src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
                ctx->indata = phys_to_virt(src_paddr);

                dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
                ctx->outdata = phys_to_virt(dst_paddr);

                ctx->datalen = nbytes - (nbytes % ctx->blocksize);

                ret = hw_crypt_noxts(ctx, device_data);
                if (ret)
                        goto out;

                nbytes -= ctx->datalen;
                ret = ablkcipher_walk_done(areq, &walk, nbytes);
                if (ret)
                        goto out;
        }
        ablkcipher_walk_complete(&walk);

out:
        /* Release the device */
        spin_lock(&device_data->ctx_lock);
        device_data->current_ctx = NULL;
        ctx->device = NULL;
        spin_unlock(&device_data->ctx_lock);

        /*
         * The down_interruptible part for this semaphore is called in
         * cryp_get_device_data.
         */
        up(&driver_data.device_allocation);

        return ret;
}

static int aes_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 *flags = &cipher->base.crt_flags;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        switch (keylen) {
        case AES_KEYSIZE_128:
                ctx->config.keysize = CRYP_KEY_SIZE_128;
                break;

        case AES_KEYSIZE_192:
                ctx->config.keysize = CRYP_KEY_SIZE_192;
                break;

        case AES_KEYSIZE_256:
                ctx->config.keysize = CRYP_KEY_SIZE_256;
                break;

        default:
                pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;

        return 0;
}

static int des_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 *flags = &cipher->base.crt_flags;
        u32 tmp[DES_EXPKEY_WORDS];
        int ret;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);
        if (keylen != DES_KEY_SIZE) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
                                __func__);
                return -EINVAL;
        }

        ret = des_ekey(tmp, key);
        if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
                                __func__);
                return -EINVAL;
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;
        return 0;
}

static int des3_ablkcipher_setkey(struct crypto_ablkcipher *cipher,
                                  const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 *flags = &cipher->base.crt_flags;
        const u32 *K = (const u32 *)key;
        u32 tmp[DES3_EDE_EXPKEY_WORDS];
        int i, ret;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);
        if (keylen != DES3_EDE_KEY_SIZE) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_RES_BAD_KEY_LEN",
                                __func__);
                return -EINVAL;
        }

        /* Checking key interdependency for weak key detection. */
        if (unlikely(!((K[0] ^ K[2]) | (K[1] ^ K[3])) ||
                                !((K[2] ^ K[4]) | (K[3] ^ K[5]))) &&
                        (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                pr_debug(DEV_DBG_NAME " [%s]: CRYPTO_TFM_REQ_WEAK_KEY",
                                __func__);
                return -EINVAL;
        }
        for (i = 0; i < 3; i++) {
                ret = des_ekey(tmp, key + i*DES_KEY_SIZE);
                if (unlikely(ret == 0) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                        *flags |= CRYPTO_TFM_RES_WEAK_KEY;
                        pr_debug(DEV_DBG_NAME " [%s]: "
                                        "CRYPTO_TFM_REQ_WEAK_KEY", __func__);
                        return -EINVAL;
                }
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;
        return 0;
}

static int cryp_blk_encrypt(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

        /* DMA does not work for DES due to a hw bug */
        if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
                return ablk_dma_crypt(areq);

        /* Everything else runs through the non-DMA version. */
        return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct ablkcipher_request *areq)
{
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_ablkcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

        /* DMA does not work for DES due to a hw bug */
        if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
                return ablk_dma_crypt(areq);

        /* Everything else runs through the non-DMA version. */
        return ablk_crypt(areq);
}

struct cryp_algo_template {
        enum cryp_algo_mode algomode;
        struct crypto_alg crypto;
};

static int cryp_cra_init(struct crypto_tfm *tfm)
{
        struct cryp_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct cryp_algo_template *cryp_alg = container_of(alg,
                        struct cryp_algo_template,
                        crypto);

        ctx->config.algomode = cryp_alg->algomode;
        ctx->blocksize = crypto_tfm_alg_blocksize(tfm);

        return 0;
}

static struct cryp_algo_template cryp_algs[] = {
        {
                .algomode = CRYP_ALGO_AES_ECB,
                .crypto = {
                        .cra_name = "aes",
                        .cra_driver_name = "aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_AES_ECB,
                .crypto = {
                        .cra_name = "ecb(aes)",
                        .cra_driver_name = "ecb-aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_AES_CBC,
                .crypto = {
                        .cra_name = "cbc(aes)",
                        .cra_driver_name = "cbc-aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        .ivsize = AES_BLOCK_SIZE,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_AES_CTR,
                .crypto = {
                        .cra_name = "ctr(aes)",
                        .cra_driver_name = "ctr-aes-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = AES_MIN_KEY_SIZE,
                                        .max_keysize = AES_MAX_KEY_SIZE,
                                        .setkey = aes_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        .ivsize = AES_BLOCK_SIZE,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_DES_ECB,
                .crypto = {
                        .cra_name = "des",
                        .cra_driver_name = "des-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES_KEY_SIZE,
                                        .max_keysize = DES_KEY_SIZE,
                                        .setkey = des_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_ECB,
                .crypto = {
                        .cra_name = "des3_ede",
                        .cra_driver_name = "des3_ede-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES3_EDE_KEY_SIZE,
                                        .max_keysize = DES3_EDE_KEY_SIZE,
                                        /*
                                         * A 3DES key needs the 3DES setkey;
                                         * the DES one rejects 24-byte keys.
                                         */
                                        .setkey = des3_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_DES_ECB,
                .crypto = {
                        .cra_name = "ecb(des)",
                        .cra_driver_name = "ecb-des-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES_KEY_SIZE,
                                        .max_keysize = DES_KEY_SIZE,
                                        .setkey = des_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_ECB,
                .crypto = {
                        .cra_name = "ecb(des3_ede)",
                        .cra_driver_name = "ecb-des3_ede-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES3_EDE_KEY_SIZE,
                                        .max_keysize = DES3_EDE_KEY_SIZE,
                                        .setkey = des3_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_DES_CBC,
                .crypto = {
                        .cra_name = "cbc(des)",
                        .cra_driver_name = "cbc-des-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES_KEY_SIZE,
                                        .max_keysize = DES_KEY_SIZE,
                                        .setkey = des_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        /* Was missing; CBC needs an IV. */
                                        .ivsize = DES_BLOCK_SIZE,
                                }
                        }
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_CBC,
                .crypto = {
                        .cra_name = "cbc(des3_ede)",
                        .cra_driver_name = "cbc-des3_ede-ux500",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                        CRYPTO_ALG_ASYNC,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct cryp_ctx),
                        .cra_alignmask = 3,
                        .cra_type = &crypto_ablkcipher_type,
                        .cra_init = cryp_cra_init,
                        .cra_module = THIS_MODULE,
                        .cra_u = {
                                .ablkcipher = {
                                        .min_keysize = DES3_EDE_KEY_SIZE,
                                        .max_keysize = DES3_EDE_KEY_SIZE,
                                        .setkey = des3_ablkcipher_setkey,
                                        .encrypt = cryp_blk_encrypt,
                                        .decrypt = cryp_blk_decrypt,
                                        .ivsize = DES3_EDE_BLOCK_SIZE,
                                }
                        }
                }
        }
};

/**
 * cryp_algs_register_all - register all the algorithms in cryp_algs above
 */
static int cryp_algs_register_all(void)
{
        int ret;
        int i;
        int count;

        pr_debug("[%s]", __func__);

        for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
                ret = crypto_register_alg(&cryp_algs[i].crypto);
                if (ret) {
                        count = i;
                        pr_err("[%s] alg registration failed",
                                        cryp_algs[i].crypto.cra_driver_name);
                        goto unreg;
                }
        }
        return 0;
unreg:
        for (i = 0; i < count; i++)
                crypto_unregister_alg(&cryp_algs[i].crypto);
        return ret;
}

/**
 * cryp_algs_unregister_all - unregister the algorithms registered above
 */
static void cryp_algs_unregister_all(void)
{
        int i;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
                crypto_unregister_alg(&cryp_algs[i].crypto);
}
1404
1405static int ux500_cryp_probe(struct platform_device *pdev)
1406{
1407        int ret;
1408        struct resource *res;
1409        struct resource *res_irq;
1410        struct cryp_device_data *device_data;
1411        struct cryp_protection_config prot = {
1412                .privilege_access = CRYP_STATE_ENABLE
1413        };
1414        struct device *dev = &pdev->dev;
1415
1416        dev_dbg(dev, "[%s]", __func__);
1417        device_data = devm_kzalloc(dev, sizeof(*device_data), GFP_ATOMIC);
1418        if (!device_data) {
1419                ret = -ENOMEM;
1420                goto out;
1421        }
1422
1423        device_data->dev = dev;
1424        device_data->current_ctx = NULL;
1425
        /* Grab the DMA configuration from the platform data. */
        pdata = dev_get_platdata(dev);
        if (!pdata) {
                dev_err(dev, "[%s]: platform data is missing", __func__);
                ret = -ENODEV;
                goto out;
        }
        mem_to_engine = &pdata->mem_to_engine;
        engine_to_mem = &pdata->engine_to_mem;
1431
1432        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1433        if (!res) {
1434                dev_err(dev, "[%s]: platform_get_resource() failed",
1435                                __func__);
1436                ret = -ENODEV;
1437                goto out;
1438        }
1439
1440        device_data->phybase = res->start;
1441        device_data->base = devm_ioremap_resource(dev, res);
1442        if (IS_ERR(device_data->base)) {
1443                dev_err(dev, "[%s]: ioremap failed!", __func__);
1444                ret = PTR_ERR(device_data->base);
1445                goto out;
1446        }
1447
1448        spin_lock_init(&device_data->ctx_lock);
1449        spin_lock_init(&device_data->power_state_spinlock);
1450
1451        /* Enable power for CRYP hardware block */
1452        device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
1453        if (IS_ERR(device_data->pwr_regulator)) {
1454                dev_err(dev, "[%s]: could not get cryp regulator", __func__);
1455                ret = PTR_ERR(device_data->pwr_regulator);
1456                device_data->pwr_regulator = NULL;
1457                goto out;
1458        }
1459
1460        /* Enable the clk for CRYP hardware block */
1461        device_data->clk = devm_clk_get(&pdev->dev, NULL);
1462        if (IS_ERR(device_data->clk)) {
1463                dev_err(dev, "[%s]: clk_get() failed!", __func__);
1464                ret = PTR_ERR(device_data->clk);
1465                goto out_regulator;
1466        }
1467
1468        ret = clk_prepare(device_data->clk);
1469        if (ret) {
1470                dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
1471                goto out_regulator;
1472        }
1473
1474        /* Enable device power (and clock) */
1475        ret = cryp_enable_power(device_data->dev, device_data, false);
1476        if (ret) {
1477                dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
1478                goto out_clk_unprepare;
1479        }
1480
1481        if (cryp_check(device_data)) {
1482                dev_err(dev, "[%s]: cryp_check() failed!", __func__);
1483                ret = -EINVAL;
1484                goto out_power;
1485        }
1486
1487        if (cryp_configure_protection(device_data, &prot)) {
1488                dev_err(dev, "[%s]: cryp_configure_protection() failed!",
1489                        __func__);
1490                ret = -EINVAL;
1491                goto out_power;
1492        }
1493
1494        res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1495        if (!res_irq) {
1496                dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
1497                        __func__);
1498                ret = -ENODEV;
1499                goto out_power;
1500        }
1501
1502        ret = devm_request_irq(&pdev->dev, res_irq->start,
1503                               cryp_interrupt_handler, 0, "cryp1", device_data);
1504        if (ret) {
1505                dev_err(dev, "[%s]: Unable to request IRQ", __func__);
1506                goto out_power;
1507        }
1508
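        /*
         * cryp_mode is a module parameter (set at the bottom of this
         * file).  Only DMA mode needs channels prepared here; in the
         * interrupt and polling modes the driver moves data through the
         * device registers itself.
         */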
1509        if (cryp_mode == CRYP_MODE_DMA)
1510                cryp_dma_setup_channel(device_data, dev);
1511
1512        platform_set_drvdata(pdev, device_data);
1513
1514        /* Put the new device into the device list... */
1515        klist_add_tail(&device_data->list_node, &driver_data.device_list);
1516
1517        /* ... and signal that a new device is available. */
1518        up(&driver_data.device_allocation);
1519
1520        atomic_set(&session_id, 1);
1521
1522        ret = cryp_algs_register_all();
1523        if (ret) {
1524                dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
1525                        __func__);
1526                goto out_power;
1527        }
1528
1529        dev_info(dev, "successfully registered\n");
1530
1531        return 0;
1532
1533out_power:
1534        cryp_disable_power(device_data->dev, device_data, false);
1535
1536out_clk_unprepare:
1537        clk_unprepare(device_data->clk);
1538
1539out_regulator:
1540        regulator_put(device_data->pwr_regulator);
1541
1542out:
1543        return ret;
1544}
1545
1546static int ux500_cryp_remove(struct platform_device *pdev)
1547{
1548        struct cryp_device_data *device_data;
1549
1550        dev_dbg(&pdev->dev, "[%s]", __func__);
1551        device_data = platform_get_drvdata(pdev);
1552        if (!device_data) {
1553                dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1554                        __func__);
                return -ENODEV;
1556        }
1557
        /*
         * Claim one slot from the device pool; if every device is busy,
         * refuse the removal rather than block here.
         */
1559        if (down_trylock(&driver_data.device_allocation))
1560                return -EBUSY;
1561
1562        /* Check that the device is free */
1563        spin_lock(&device_data->ctx_lock);
1564        /* current_ctx allocates a device, NULL = unallocated */
1565        if (device_data->current_ctx) {
1566                /* The device is busy */
1567                spin_unlock(&device_data->ctx_lock);
1568                /* Return the device to the pool. */
1569                up(&driver_data.device_allocation);
1570                return -EBUSY;
1571        }
1572
1573        spin_unlock(&device_data->ctx_lock);
1574
1575        /* Remove the device from the list */
1576        if (klist_node_attached(&device_data->list_node))
1577                klist_remove(&device_data->list_node);
1578
1579        /* If this was the last device, remove the services */
1580        if (list_empty(&driver_data.device_list.k_list))
1581                cryp_algs_unregister_all();
1582
1583        if (cryp_disable_power(&pdev->dev, device_data, false))
1584                dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
1585                        __func__);
1586
1587        clk_unprepare(device_data->clk);
1588        regulator_put(device_data->pwr_regulator);
1589
1590        return 0;
1591}
1592
1593static void ux500_cryp_shutdown(struct platform_device *pdev)
1594{
1595        struct cryp_device_data *device_data;
1596
1597        dev_dbg(&pdev->dev, "[%s]", __func__);
1598
1599        device_data = platform_get_drvdata(pdev);
1600        if (!device_data) {
1601                dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
1602                        __func__);
1603                return;
1604        }
1605
1606        /* Check that the device is free */
1607        spin_lock(&device_data->ctx_lock);
1608        /* current_ctx allocates a device, NULL = unallocated */
1609        if (!device_data->current_ctx) {
1610                if (down_trylock(&driver_data.device_allocation))
                        dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
                                "Shutting down anyway...", __func__);
                /*
                 * Allocate the device by setting current_ctx to a non-NULL
                 * dummy value, so no new context can claim the device while
                 * it is shutting down.
                 */
1618                device_data->current_ctx++;
1619        }
1620        spin_unlock(&device_data->ctx_lock);
1621
1622        /* Remove the device from the list */
1623        if (klist_node_attached(&device_data->list_node))
1624                klist_remove(&device_data->list_node);
1625
1626        /* If this was the last device, remove the services */
1627        if (list_empty(&driver_data.device_list.k_list))
1628                cryp_algs_unregister_all();
1629
1630        if (cryp_disable_power(&pdev->dev, device_data, false))
1631                dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
1632                        __func__);
}
1635
1636#ifdef CONFIG_PM_SLEEP
1637static int ux500_cryp_suspend(struct device *dev)
1638{
1639        int ret;
1640        struct platform_device *pdev = to_platform_device(dev);
1641        struct cryp_device_data *device_data;
1642        struct resource *res_irq;
1643        struct cryp_ctx *temp_ctx = NULL;
1644
1645        dev_dbg(dev, "[%s]", __func__);
1646
1647        /* Handle state? */
1648        device_data = platform_get_drvdata(pdev);
1649        if (!device_data) {
1650                dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
                return -ENODEV;
1652        }
1653
1654        res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
1655        if (!res_irq)
                dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable", __func__);
1657        else
1658                disable_irq(res_irq->start);
1659
1660        spin_lock(&device_data->ctx_lock);
1661        if (!device_data->current_ctx)
1662                device_data->current_ctx++;
1663        spin_unlock(&device_data->ctx_lock);
1664
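        /*
         * If the device was idle it now holds the dummy (NULL + 1)
         * context set just above; ++temp_ctx recomputes that dummy value
         * for the comparison.  An idle device is taken out of the
         * allocation pool and powered down completely, while a busy one
         * has its hardware state saved (third argument true) so it can be
         * restored at resume.
         */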
        if (device_data->current_ctx == ++temp_ctx) {
                if (down_interruptible(&driver_data.device_allocation))
                        dev_dbg(dev, "[%s]: down_interruptible() failed",
                                __func__);
                ret = cryp_disable_power(dev, device_data, false);
        } else {
                ret = cryp_disable_power(dev, device_data, true);
        }
1673
1674        if (ret)
                dev_err(dev, "[%s]: cryp_disable_power() failed", __func__);
1676
1677        return ret;
1678}
1679
1680static int ux500_cryp_resume(struct device *dev)
1681{
1682        int ret = 0;
1683        struct platform_device *pdev = to_platform_device(dev);
1684        struct cryp_device_data *device_data;
1685        struct resource *res_irq;
1686        struct cryp_ctx *temp_ctx = NULL;
1687
1688        dev_dbg(dev, "[%s]", __func__);
1689
1690        device_data = platform_get_drvdata(pdev);
1691        if (!device_data) {
1692                dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
                return -ENODEV;
1694        }
1695
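        /*
         * Mirror image of the suspend logic: if suspend parked the device
         * with the dummy (NULL + 1) context, release it back into the
         * allocation pool; otherwise a real context still owns the device
         * and its saved hardware state must be restored.
         */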
1696        spin_lock(&device_data->ctx_lock);
1697        if (device_data->current_ctx == ++temp_ctx)
1698                device_data->current_ctx = NULL;
1699        spin_unlock(&device_data->ctx_lock);
1700
1702        if (!device_data->current_ctx)
1703                up(&driver_data.device_allocation);
1704        else
1705                ret = cryp_enable_power(dev, device_data, true);
1706
        if (ret) {
                dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
        } else {
                res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
                if (res_irq)
                        enable_irq(res_irq->start);
        }
1714
1715        return ret;
1716}
1717#endif
1718
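/*
 * SIMPLE_DEV_PM_OPS wires the two CONFIG_PM_SLEEP handlers above into a
 * struct dev_pm_ops; when PM_SLEEP is disabled the table is left empty,
 * which is why the callbacks themselves are #ifdef-guarded.
 */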
1719static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);
1720
1721static const struct of_device_id ux500_cryp_match[] = {
1722        { .compatible = "stericsson,ux500-cryp" },
1723        { },
1724};
1725MODULE_DEVICE_TABLE(of, ux500_cryp_match);
1726
1727static struct platform_driver cryp_driver = {
1728        .probe  = ux500_cryp_probe,
1729        .remove = ux500_cryp_remove,
1730        .shutdown = ux500_cryp_shutdown,
1731        .driver = {
1732                .name  = "cryp1",
1733                .of_match_table = ux500_cryp_match,
1734                .pm    = &ux500_cryp_pm,
1735        }
1736};
1737
1738static int __init ux500_cryp_mod_init(void)
1739{
1740        pr_debug("[%s] is called!", __func__);
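        /*
         * Device-pool bookkeeping: every successful probe() adds a device
         * to the klist and ups the semaphore, so the semaphore count
         * always matches the number of registered CRYP instances.
         */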
1741        klist_init(&driver_data.device_list, NULL, NULL);
1742        /* Initialize the semaphore to 0 devices (locked state) */
1743        sema_init(&driver_data.device_allocation, 0);
1744        return platform_driver_register(&cryp_driver);
1745}
1746
1747static void __exit ux500_cryp_mod_fini(void)
1748{
1749        pr_debug("[%s] is called!", __func__);
1750        platform_driver_unregister(&cryp_driver);
1751}
1752
1753module_init(ux500_cryp_mod_init);
1754module_exit(ux500_cryp_mod_fini);
1755
1756module_param(cryp_mode, int, 0);
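/*
 * Accepted values follow enum cryp_mode (polling, interrupt driven or
 * DMA), declared with the rest of the CRYP definitions.
 */
MODULE_PARM_DESC(cryp_mode, "CRYP data transfer mode (see enum cryp_mode)");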
1757
1758MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
1759MODULE_ALIAS_CRYPTO("aes-all");
1760MODULE_ALIAS_CRYPTO("des-all");
1761
1762MODULE_LICENSE("GPL");
1763