linux/drivers/crypto/ux500/cryp/cryp_core.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Andreas Westin <andreas.westin@stericsson.com> for ST-Ericsson.
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/dmaengine.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irqreturn.h>
#include <linux/klist.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/platform_device.h>
#include <linux/regulator/consumer.h>
#include <linux/semaphore.h>
#include <linux/platform_data/dma-ste-dma40.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

#include <linux/platform_data/crypto-ux500.h>

#include "cryp_p.h"
#include "cryp.h"

#define CRYP_MAX_KEY_SIZE       32
#define BYTES_PER_WORD          4

static int cryp_mode;
static atomic_t session_id;

static struct stedma40_chan_cfg *mem_to_engine;
static struct stedma40_chan_cfg *engine_to_mem;

/**
 * struct cryp_driver_data - data specific to the driver.
 *
 * @device_list: A list of registered devices to choose from.
 * @device_allocation: A semaphore initialized with number of devices.
 */
struct cryp_driver_data {
        struct klist device_list;
        struct semaphore device_allocation;
};

/**
 * struct cryp_ctx - Crypto context
 * @config: Crypto mode.
 * @key: Key buffer (up to CRYP_MAX_KEY_SIZE bytes).
 * @keylen: Length of key.
 * @iv: Pointer to initialization vector.
 * @indata: Pointer to indata.
 * @outdata: Pointer to outdata.
 * @datalen: Length of indata.
 * @outlen: Length of outdata.
 * @blocksize: Size of blocks.
 * @updated: Updated flag.
 * @dev_ctx: Device dependent context.
 * @device: Pointer to the device.
 * @session_id: Id of the last hardware session configured for this context.
 */
struct cryp_ctx {
        struct cryp_config config;
        u8 key[CRYP_MAX_KEY_SIZE];
        u32 keylen;
        u8 *iv;
        const u8 *indata;
        u8 *outdata;
        u32 datalen;
        u32 outlen;
        u32 blocksize;
        u8 updated;
        struct cryp_device_context dev_ctx;
        struct cryp_device_data *device;
        u32 session_id;
};

static struct cryp_driver_data driver_data;

/**
 * uint8p_to_uint32_be - 4*uint8 to uint32 big endian
 * @in: Data to convert.
 */
static inline u32 uint8p_to_uint32_be(u8 *in)
{
        u32 *data = (u32 *)in;

        return cpu_to_be32p(data);
}
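
/*
 * Illustrative example: for in[] = { 0x12, 0x34, 0x56, 0x78 } the bytes are
 * read as one big-endian word, so the function returns 0x12345678 on both
 * little- and big-endian CPUs.
 */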

/**
 * swap_bits_in_byte - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 *  nibble 2 (n2) bits 4-7.
 *
 *  Nibble 1 (n1):
 *  (The "old" (moved) bit is replaced with a zero)
 *  1. Move bit 6 and 7, 4 positions to the right.
 *  2. Move bit 3 and 5, 2 positions to the right.
 *  3. Move bit 1-4, 1 position to the right.
 *
 *  Nibble 2 (n2):
 *  1. Move bit 0 and 1, 4 positions to the left.
 *  2. Move bit 2 and 4, 2 positions to the left.
 *  3. Move bit 3-6, 1 position to the left.
 *
 *  Combine the two nibbles to a complete and swapped byte.
 */
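
/*
 * Worked example (illustrative): b = 0x46 = 0b01000110. Mirroring the bit
 * order (bit 0 <-> bit 7, bit 1 <-> bit 6, and so on) yields
 * 0b01100010 = 0x62.
 */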

static inline u8 swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
                                  right shift 2 */
#define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
                                  right shift 1 */
#define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
                                  left shift 2 */
#define L_SHIFT_1_MASK  0x78 /* (After left shift 2) Bits 3-6,
                                  left shift 1 */

        u8 n1;
        u8 n2;

        /* Swap most significant nibble */
        /* Right shift 4, bits 6 and 7 */
        n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
        /* Right shift 2, bits 3 and 5 */
        n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
        /* Right shift 1, bits 1-4 */
        n1 = (n1  & R_SHIFT_1_MASK) >> 1;

        /* Swap least significant nibble */
        /* Left shift 4, bits 0 and 1 */
        n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
        /* Left shift 2, bits 2 and 4 */
        n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
        /* Left shift 1, bits 3-6 */
        n2 = (n2  & L_SHIFT_1_MASK) << 1;

        return n1 | n2;
}
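
/*
 * swap_words_in_key_and_bits_in_byte() reverses the 32-bit word order of
 * the key while mirroring the bits in every byte, which is the layout the
 * AES key registers expect: with len == 16, input bytes in[0..3] end up at
 * out[12..15], each passed through swap_bits_in_byte() above.
 */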

static inline void swap_words_in_key_and_bits_in_byte(const u8 *in,
                                                      u8 *out, u32 len)
{
        unsigned int i = 0;
        int j;
        int index = 0;

        j = len - BYTES_PER_WORD;
        while (j >= 0) {
                for (i = 0; i < BYTES_PER_WORD; i++) {
                        index = len - j - BYTES_PER_WORD + i;
                        out[j + i] =
                                swap_bits_in_byte(in[index]);
                }
                j -= BYTES_PER_WORD;
        }
}

static void add_session_id(struct cryp_ctx *ctx)
{
        /*
         * We never want 0 to be a valid value, since this is the default value
         * for the software context.
         */
        if (unlikely(atomic_inc_and_test(&session_id)))
                atomic_inc(&session_id);

        ctx->session_id = atomic_read(&session_id);
}

static irqreturn_t cryp_interrupt_handler(int irq, void *param)
{
        struct cryp_ctx *ctx;
        int count;
        struct cryp_device_data *device_data;

        if (param == NULL) {
                BUG_ON(!param);
                return IRQ_HANDLED;
        }

        /* The device is coming from the one found in hw_crypt_noxts. */
        device_data = (struct cryp_device_data *)param;

        ctx = device_data->current_ctx;

        if (ctx == NULL) {
                BUG_ON(!ctx);
                return IRQ_HANDLED;
        }

        dev_dbg(ctx->device->dev, "[%s] (len: %d) %s, ", __func__, ctx->outlen,
                cryp_pending_irq_src(device_data, CRYP_IRQ_SRC_OUTPUT_FIFO) ?
                "out" : "in");

        if (cryp_pending_irq_src(device_data,
                                 CRYP_IRQ_SRC_OUTPUT_FIFO)) {
                if (ctx->outlen / ctx->blocksize > 0) {
                        /* One block is blocksize / 4 32-bit words. */
                        count = ctx->blocksize / 4;

                        readsl(&device_data->base->dout, ctx->outdata, count);
                        /*
                         * readsl() moved count words, i.e. count *
                         * BYTES_PER_WORD bytes; advance the byte counters
                         * accordingly.
                         */
                        ctx->outdata += count * BYTES_PER_WORD;
                        ctx->outlen -= count * BYTES_PER_WORD;

                        if (ctx->outlen == 0) {
                                cryp_disable_irq_src(device_data,
                                                     CRYP_IRQ_SRC_OUTPUT_FIFO);
                        }
                }
        } else if (cryp_pending_irq_src(device_data,
                                        CRYP_IRQ_SRC_INPUT_FIFO)) {
                if (ctx->datalen / ctx->blocksize > 0) {
                        count = ctx->blocksize / 4;

                        writesl(&device_data->base->din, ctx->indata, count);

                        /* As above: count words written, count * 4 bytes. */
                        ctx->indata += count * BYTES_PER_WORD;
                        ctx->datalen -= count * BYTES_PER_WORD;

                        if (ctx->datalen == 0)
                                cryp_disable_irq_src(device_data,
                                                   CRYP_IRQ_SRC_INPUT_FIFO);

                        if (ctx->config.algomode == CRYP_ALGO_AES_XTS) {
                                CRYP_PUT_BITS(&device_data->base->cr,
                                              CRYP_START_ENABLE,
                                              CRYP_CR_START_POS,
                                              CRYP_CR_START_MASK);

                                cryp_wait_until_done(device_data);
                        }
                }
        }

        return IRQ_HANDLED;
}

static int mode_is_aes(enum cryp_algo_mode mode)
{
        return  CRYP_ALGO_AES_ECB == mode ||
                CRYP_ALGO_AES_CBC == mode ||
                CRYP_ALGO_AES_CTR == mode ||
                CRYP_ALGO_AES_XTS == mode;
}

static int cfg_iv(struct cryp_device_data *device_data, u32 left, u32 right,
                  enum cryp_init_vector_index index)
{
        struct cryp_init_vector_value vector_value;

        dev_dbg(device_data->dev, "[%s]", __func__);

        vector_value.init_value_left = left;
        vector_value.init_value_right = right;

        return cryp_configure_init_vector(device_data,
                                          index,
                                          vector_value);
}

static int cfg_ivs(struct cryp_device_data *device_data, struct cryp_ctx *ctx)
{
        int i;
        int status = 0;
        int num_of_regs = ctx->blocksize / 8;
        u32 iv[AES_BLOCK_SIZE / 4];

        dev_dbg(device_data->dev, "[%s]", __func__);

        /*
         * Since we loop on num_of_regs we need to have a check in case
         * someone provides an incorrect blocksize which would force calling
         * cfg_iv with i greater than 2 which is an error.
         */
        if (num_of_regs > 2) {
                dev_err(device_data->dev, "[%s] Incorrect blocksize %d",
                        __func__, ctx->blocksize);
                return -EINVAL;
        }
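
        /*
         * Each IV register pair holds two 32-bit words: a 16 byte AES block
         * gives num_of_regs == 2, an 8 byte DES block gives num_of_regs == 1.
         */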

        for (i = 0; i < ctx->blocksize / 4; i++)
                iv[i] = uint8p_to_uint32_be(ctx->iv + i*4);

        for (i = 0; i < num_of_regs; i++) {
                status = cfg_iv(device_data, iv[i*2], iv[i*2+1],
                                (enum cryp_init_vector_index) i);
                if (status != 0)
                        return status;
        }
        return status;
}

static int set_key(struct cryp_device_data *device_data,
                   u32 left_key,
                   u32 right_key,
                   enum cryp_key_reg_index index)
{
        struct cryp_key_value key_value;
        int cryp_error;

        dev_dbg(device_data->dev, "[%s]", __func__);

        key_value.key_value_left = left_key;
        key_value.key_value_right = right_key;

        cryp_error = cryp_configure_key_values(device_data,
                                               index,
                                               key_value);
        if (cryp_error != 0)
                dev_err(device_data->dev, "[%s]: "
                        "cryp_configure_key_values() failed!", __func__);

        return cryp_error;
}

static int cfg_keys(struct cryp_ctx *ctx)
{
        int i;
        int num_of_regs = ctx->keylen / 8;
        u32 swapped_key[CRYP_MAX_KEY_SIZE / 4];
        int cryp_error = 0;

        dev_dbg(ctx->device->dev, "[%s]", __func__);

        if (mode_is_aes(ctx->config.algomode)) {
                swap_words_in_key_and_bits_in_byte((u8 *)ctx->key,
                                                   (u8 *)swapped_key,
                                                   ctx->keylen);
        } else {
                for (i = 0; i < ctx->keylen / 4; i++)
                        swapped_key[i] = uint8p_to_uint32_be(ctx->key + i*4);
        }
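
        /*
         * num_of_regs counts 64-bit key register pairs: e.g. an AES-128 key
         * (keylen == 16) programs two pairs, an AES-256 key (keylen == 32)
         * programs four.
         */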

        for (i = 0; i < num_of_regs; i++) {
                cryp_error = set_key(ctx->device,
                                     *(((u32 *)swapped_key)+i*2),
                                     *(((u32 *)swapped_key)+i*2+1),
                                     (enum cryp_key_reg_index) i);

                if (cryp_error != 0) {
                        dev_err(ctx->device->dev, "[%s]: set_key() failed!",
                                        __func__);
                        return cryp_error;
                }
        }
        return cryp_error;
}

static int cryp_setup_context(struct cryp_ctx *ctx,
                              struct cryp_device_data *device_data)
{
        u32 control_register = CRYP_CR_DEFAULT;

        switch (cryp_mode) {
        case CRYP_MODE_INTERRUPT:
                writel_relaxed(CRYP_IMSC_DEFAULT, &device_data->base->imsc);
                break;

        case CRYP_MODE_DMA:
                writel_relaxed(CRYP_DMACR_DEFAULT, &device_data->base->dmacr);
                break;

        default:
                break;
        }
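
        /*
         * Three cases follow: a fresh context (updated == 0) gets its keys
         * and IVs programmed from scratch; a context whose session id no
         * longer matches the current hardware session is restored from its
         * saved device context; otherwise the saved control register is
         * simply reused.
         */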

        if (ctx->updated == 0) {
                cryp_flush_inoutfifo(device_data);
                if (cfg_keys(ctx) != 0) {
                        dev_err(ctx->device->dev, "[%s]: cfg_keys failed!",
                                __func__);
                        return -EINVAL;
                }

                if (ctx->iv &&
                    CRYP_ALGO_AES_ECB != ctx->config.algomode &&
                    CRYP_ALGO_DES_ECB != ctx->config.algomode &&
                    CRYP_ALGO_TDES_ECB != ctx->config.algomode) {
                        if (cfg_ivs(device_data, ctx) != 0)
                                return -EPERM;
                }

                cryp_set_configuration(device_data, &ctx->config,
                                       &control_register);
                add_session_id(ctx);
        } else if (ctx->updated == 1 &&
                   ctx->session_id != atomic_read(&session_id)) {
                cryp_flush_inoutfifo(device_data);
                cryp_restore_device_context(device_data, &ctx->dev_ctx);

                add_session_id(ctx);
                control_register = ctx->dev_ctx.cr;
        } else {
                control_register = ctx->dev_ctx.cr;
        }

        writel(control_register |
               (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS),
               &device_data->base->cr);

        return 0;
}

static int cryp_get_device_data(struct cryp_ctx *ctx,
                                struct cryp_device_data **device_data)
{
        int ret;
        struct klist_iter device_iterator;
        struct klist_node *device_node;
        struct cryp_device_data *local_device_data = NULL;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        /* Wait until a device is available */
        ret = down_interruptible(&driver_data.device_allocation);
        if (ret)
                return ret;  /* Interrupted */

        /* Select a device */
        klist_iter_init(&driver_data.device_list, &device_iterator);

        device_node = klist_next(&device_iterator);
        while (device_node) {
                local_device_data = container_of(device_node,
                                           struct cryp_device_data, list_node);
                spin_lock(&local_device_data->ctx_lock);
                /* current_ctx allocates a device, NULL = unallocated */
                if (local_device_data->current_ctx) {
                        device_node = klist_next(&device_iterator);
                } else {
                        local_device_data->current_ctx = ctx;
                        ctx->device = local_device_data;
                        spin_unlock(&local_device_data->ctx_lock);
                        break;
                }
                spin_unlock(&local_device_data->ctx_lock);
        }
        klist_iter_exit(&device_iterator);

        if (!device_node) {
                /*
                 * No free device found.
                 * Since we allocated a device with down_interruptible, this
                 * should not be able to happen.
                 * Number of available devices, which are contained in
                 * device_allocation, is therefore decremented by not doing
                 * an up(device_allocation).
                 */
                return -EBUSY;
        }

        *device_data = local_device_data;

        return 0;
}

static void cryp_dma_setup_channel(struct cryp_device_data *device_data,
                                   struct device *dev)
{
        struct dma_slave_config mem2cryp = {
                .direction = DMA_MEM_TO_DEV,
                .dst_addr = device_data->phybase + CRYP_DMA_TX_FIFO,
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .dst_maxburst = 4,
        };
        struct dma_slave_config cryp2mem = {
                .direction = DMA_DEV_TO_MEM,
                .src_addr = device_data->phybase + CRYP_DMA_RX_FIFO,
                .src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES,
                .src_maxburst = 4,
        };

        dma_cap_zero(device_data->dma.mask);
        dma_cap_set(DMA_SLAVE, device_data->dma.mask);

        device_data->dma.cfg_mem2cryp = mem_to_engine;
        device_data->dma.chan_mem2cryp =
                dma_request_channel(device_data->dma.mask,
                                    stedma40_filter,
                                    device_data->dma.cfg_mem2cryp);

        device_data->dma.cfg_cryp2mem = engine_to_mem;
        device_data->dma.chan_cryp2mem =
                dma_request_channel(device_data->dma.mask,
                                    stedma40_filter,
                                    device_data->dma.cfg_cryp2mem);

        dmaengine_slave_config(device_data->dma.chan_mem2cryp, &mem2cryp);
        dmaengine_slave_config(device_data->dma.chan_cryp2mem, &cryp2mem);

        init_completion(&device_data->dma.cryp_dma_complete);
}

static void cryp_dma_out_callback(void *data)
{
        struct cryp_ctx *ctx = (struct cryp_ctx *) data;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        complete(&ctx->device->dma.cryp_dma_complete);
}

static int cryp_set_dma_transfer(struct cryp_ctx *ctx,
                                 struct scatterlist *sg,
                                 int len,
                                 enum dma_data_direction direction)
{
        struct dma_async_tx_descriptor *desc;
        struct dma_chan *channel = NULL;
        dma_cookie_t cookie;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        if (unlikely(!IS_ALIGNED((unsigned long)sg, 4))) {
                dev_err(ctx->device->dev, "[%s]: Data in sg list isn't "
                        "aligned! Addr: 0x%08lx", __func__, (unsigned long)sg);
                return -EFAULT;
        }

        switch (direction) {
        case DMA_TO_DEVICE:
                channel = ctx->device->dma.chan_mem2cryp;
                ctx->device->dma.sg_src = sg;
                ctx->device->dma.sg_src_len = dma_map_sg(channel->device->dev,
                                                 ctx->device->dma.sg_src,
                                                 ctx->device->dma.nents_src,
                                                 direction);

                if (!ctx->device->dma.sg_src_len) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not map the sg list (TO_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                        "(TO_DEVICE)", __func__);

                desc = dmaengine_prep_slave_sg(channel,
                                ctx->device->dma.sg_src,
                                ctx->device->dma.sg_src_len,
                                DMA_MEM_TO_DEV, DMA_CTRL_ACK);
                break;

        case DMA_FROM_DEVICE:
                channel = ctx->device->dma.chan_cryp2mem;
                ctx->device->dma.sg_dst = sg;
                ctx->device->dma.sg_dst_len = dma_map_sg(channel->device->dev,
                                                 ctx->device->dma.sg_dst,
                                                 ctx->device->dma.nents_dst,
                                                 direction);

                if (!ctx->device->dma.sg_dst_len) {
                        dev_dbg(ctx->device->dev,
                                "[%s]: Could not map the sg list (FROM_DEVICE)",
                                __func__);
                        return -EFAULT;
                }

                dev_dbg(ctx->device->dev, "[%s]: Setting up DMA for buffer "
                        "(FROM_DEVICE)", __func__);

                desc = dmaengine_prep_slave_sg(channel,
                                ctx->device->dma.sg_dst,
                                ctx->device->dma.sg_dst_len,
                                DMA_DEV_TO_MEM,
                                DMA_CTRL_ACK |
                                DMA_PREP_INTERRUPT);

                desc->callback = cryp_dma_out_callback;
                desc->callback_param = ctx;
                break;

        default:
                dev_dbg(ctx->device->dev, "[%s]: Invalid DMA direction",
                        __func__);
                return -EFAULT;
        }

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie)) {
                dev_dbg(ctx->device->dev, "[%s]: DMA submission failed\n",
                        __func__);
                return cookie;
        }

        dma_async_issue_pending(channel);

        return 0;
}

static void cryp_dma_done(struct cryp_ctx *ctx)
{
        struct dma_chan *chan;

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        chan = ctx->device->dma.chan_mem2cryp;
        dmaengine_terminate_all(chan);
        dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_src,
                     ctx->device->dma.sg_src_len, DMA_TO_DEVICE);

        chan = ctx->device->dma.chan_cryp2mem;
        dmaengine_terminate_all(chan);
        dma_unmap_sg(chan->device->dev, ctx->device->dma.sg_dst,
                     ctx->device->dma.sg_dst_len, DMA_FROM_DEVICE);
}

static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
                          int len)
{
        int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);

        dev_dbg(ctx->device->dev, "[%s]: ", __func__);

        if (error) {
                dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
                        "failed", __func__);
                return error;
        }

        return len;
}

static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
{
        int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);

        if (error) {
                dev_dbg(ctx->device->dev, "[%s]: cryp_set_dma_transfer() "
                        "failed", __func__);
                return error;
        }

        return len;
}

static void cryp_polling_mode(struct cryp_ctx *ctx,
                              struct cryp_device_data *device_data)
{
        int len = ctx->blocksize / BYTES_PER_WORD;
        int remaining_length = ctx->datalen;
        u32 *indata = (u32 *)ctx->indata;
        u32 *outdata = (u32 *)ctx->outdata;
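
        /*
         * len is one block expressed in 32-bit words, while remaining_length
         * counts bytes; each loop iteration below moves one full block
         * through the engine.
         */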

        while (remaining_length > 0) {
                writesl(&device_data->base->din, indata, len);
                indata += len;
                remaining_length -= (len * BYTES_PER_WORD);
                cryp_wait_until_done(device_data);

                readsl(&device_data->base->dout, outdata, len);
                outdata += len;
                cryp_wait_until_done(device_data);
        }
}

static int cryp_disable_power(struct device *dev,
                              struct cryp_device_data *device_data,
                              bool save_device_context)
{
        int ret = 0;

        dev_dbg(dev, "[%s]", __func__);

        spin_lock(&device_data->power_state_spinlock);
        if (!device_data->power_state)
                goto out;

        spin_lock(&device_data->ctx_lock);
        if (save_device_context && device_data->current_ctx) {
                cryp_save_device_context(device_data,
                                &device_data->current_ctx->dev_ctx,
                                cryp_mode);
                device_data->restore_dev_ctx = true;
        }
        spin_unlock(&device_data->ctx_lock);

        clk_disable(device_data->clk);
        ret = regulator_disable(device_data->pwr_regulator);
        if (ret)
                dev_err(dev, "[%s]: "
                                "regulator_disable() failed!",
                                __func__);

        device_data->power_state = false;

out:
        spin_unlock(&device_data->power_state_spinlock);

        return ret;
}

static int cryp_enable_power(
                struct device *dev,
                struct cryp_device_data *device_data,
                bool restore_device_context)
{
        int ret = 0;

        dev_dbg(dev, "[%s]", __func__);

        spin_lock(&device_data->power_state_spinlock);
        if (!device_data->power_state) {
                ret = regulator_enable(device_data->pwr_regulator);
                if (ret) {
                        dev_err(dev, "[%s]: regulator_enable() failed!",
                                        __func__);
                        goto out;
                }

                ret = clk_enable(device_data->clk);
                if (ret) {
                        dev_err(dev, "[%s]: clk_enable() failed!",
                                        __func__);
                        regulator_disable(device_data->pwr_regulator);
                        goto out;
                }
                device_data->power_state = true;
        }

        if (device_data->restore_dev_ctx) {
                spin_lock(&device_data->ctx_lock);
                if (restore_device_context && device_data->current_ctx) {
                        device_data->restore_dev_ctx = false;
                        cryp_restore_device_context(device_data,
                                        &device_data->current_ctx->dev_ctx);
                }
                spin_unlock(&device_data->ctx_lock);
        }
out:
        spin_unlock(&device_data->power_state_spinlock);

        return ret;
}

static int hw_crypt_noxts(struct cryp_ctx *ctx,
                          struct cryp_device_data *device_data)
{
        int ret = 0;

        const u8 *indata = ctx->indata;
        u8 *outdata = ctx->outdata;
        u32 datalen = ctx->datalen;
        u32 outlen = datalen;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->outlen = ctx->datalen;

        if (unlikely(!IS_ALIGNED((unsigned long)indata, 4))) {
                pr_debug(DEV_DBG_NAME " [%s]: Data isn't aligned! Addr: "
                         "0x%08lx", __func__, (unsigned long)indata);
                return -EINVAL;
        }

        ret = cryp_setup_context(ctx, device_data);

        if (ret)
                goto out;

        if (cryp_mode == CRYP_MODE_INTERRUPT) {
                cryp_enable_irq_src(device_data, CRYP_IRQ_SRC_INPUT_FIFO |
                                    CRYP_IRQ_SRC_OUTPUT_FIFO);

                /*
                 * ctx->outlen is decremented in the cryp_interrupt_handler
                 * function. The cpu_relax() (barrier) is needed to make sure
                 * that gcc does not optimize away this variable.
                 */
                while (ctx->outlen > 0)
                        cpu_relax();
        } else if (cryp_mode == CRYP_MODE_POLLING ||
                   cryp_mode == CRYP_MODE_DMA) {
                /*
                 * DMA mode (cryp_mode == 2) is handled here as well on
                 * purpose: the dedicated DMA routines are only used for
                 * cipher/plaintext larger than one block, and requests that
                 * end up in this function still use polling mode, since the
                 * overhead of setting up DMA would eat up the benefit of
                 * using it.
                 */
                cryp_polling_mode(ctx, device_data);
        } else {
                dev_err(ctx->device->dev, "[%s]: Invalid operation mode!",
                        __func__);
                ret = -EPERM;
                goto out;
        }

        cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
        ctx->updated = 1;

out:
        ctx->indata = indata;
        ctx->outdata = outdata;
        ctx->datalen = datalen;
        ctx->outlen = outlen;

        return ret;
}
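
/*
 * Count how many scatterlist entries are needed to cover nbytes. This
 * assumes the list is long enough (compare sg_nents_for_len()); there is
 * no check for running off the end of the list.
 */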

static int get_nents(struct scatterlist *sg, int nbytes)
{
        int nents = 0;

        while (nbytes > 0) {
                nbytes -= sg->length;
                sg = sg_next(sg);
                nents++;
        }

        return nents;
}

static int ablk_dma_crypt(struct skcipher_request *areq)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
        struct cryp_device_data *device_data;

        int bytes_written = 0;
        int bytes_read = 0;
        int ret;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->datalen = areq->cryptlen;
        ctx->outlen = areq->cryptlen;

        ret = cryp_get_device_data(ctx, &device_data);
        if (ret)
                return ret;

        ret = cryp_setup_context(ctx, device_data);
        if (ret)
                goto out;

        /* We have the device now, so store the nents in the dma struct. */
        ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
        ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);

        /* Enable DMA in- and output. */
        cryp_configure_for_dma(device_data, CRYP_DMA_ENABLE_BOTH_DIRECTIONS);

        bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
        bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);

        wait_for_completion(&ctx->device->dma.cryp_dma_complete);
        cryp_dma_done(ctx);

        cryp_save_device_context(device_data, &ctx->dev_ctx, cryp_mode);
        ctx->updated = 1;

out:
        spin_lock(&device_data->ctx_lock);
        device_data->current_ctx = NULL;
        ctx->device = NULL;
        spin_unlock(&device_data->ctx_lock);

        /*
         * The down_interruptible part for this semaphore is called in
         * cryp_get_device_data.
         */
        up(&driver_data.device_allocation);

        if (unlikely(bytes_written != bytes_read))
                return -EPERM;

        return 0;
}

static int ablk_crypt(struct skcipher_request *areq)
{
        struct skcipher_walk walk;
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
        struct cryp_device_data *device_data;
        unsigned long src_paddr;
        unsigned long dst_paddr;
        int ret;
        int nbytes;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ret = cryp_get_device_data(ctx, &device_data);
        if (ret)
                /* device_data is not valid here, so don't goto out. */
                return ret;

        ret = skcipher_walk_async(&walk, areq);

        if (ret) {
                pr_err(DEV_DBG_NAME "[%s]: skcipher_walk_async() failed!",
                        __func__);
                goto out;
        }

        while ((nbytes = walk.nbytes) > 0) {
                ctx->iv = walk.iv;
                src_paddr = (page_to_phys(walk.src.phys.page) + walk.src.phys.offset);
                ctx->indata = phys_to_virt(src_paddr);

                dst_paddr = (page_to_phys(walk.dst.phys.page) + walk.dst.phys.offset);
                ctx->outdata = phys_to_virt(dst_paddr);
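
                /*
                 * The page_to_phys()/phys_to_virt() mapping above only works
                 * for lowmem pages; highmem scatterlist entries would need
                 * kmap() instead.
                 */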

                ctx->datalen = nbytes - (nbytes % ctx->blocksize);

                ret = hw_crypt_noxts(ctx, device_data);
                if (ret)
                        goto out;

                nbytes -= ctx->datalen;
                ret = skcipher_walk_done(&walk, nbytes);
                if (ret)
                        goto out;
        }

out:
        /* Release the device */
        spin_lock(&device_data->ctx_lock);
        device_data->current_ctx = NULL;
        ctx->device = NULL;
        spin_unlock(&device_data->ctx_lock);

        /*
         * The down_interruptible part for this semaphore is called in
         * cryp_get_device_data.
         */
        up(&driver_data.device_allocation);

        return ret;
}

static int aes_skcipher_setkey(struct crypto_skcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        switch (keylen) {
        case AES_KEYSIZE_128:
                ctx->config.keysize = CRYP_KEY_SIZE_128;
                break;

        case AES_KEYSIZE_192:
                ctx->config.keysize = CRYP_KEY_SIZE_192;
                break;

        case AES_KEYSIZE_256:
                ctx->config.keysize = CRYP_KEY_SIZE_256;
                break;

        default:
                pr_err(DEV_DBG_NAME "[%s]: Unknown keylen!", __func__);
                return -EINVAL;
        }

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;

        return 0;
}

static int des_skcipher_setkey(struct crypto_skcipher *cipher,
                                 const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
        int err;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        err = verify_skcipher_des_key(cipher, key);
        if (err)
                return err;

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;
        return 0;
}

static int des3_skcipher_setkey(struct crypto_skcipher *cipher,
                                  const u8 *key, unsigned int keylen)
{
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);
        int err;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        err = verify_skcipher_des3_key(cipher, key);
        if (err)
                return err;

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        ctx->updated = 0;
        return 0;
}

static int cryp_blk_encrypt(struct skcipher_request *areq)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->config.algodir = CRYP_ALGORITHM_ENCRYPT;

        /* DMA does not work for DES due to a hw bug */
        if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
                return ablk_dma_crypt(areq);

        /* For everything except DMA, we run the non DMA version. */
        return ablk_crypt(areq);
}

static int cryp_blk_decrypt(struct skcipher_request *areq)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
        struct cryp_ctx *ctx = crypto_skcipher_ctx(cipher);

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        ctx->config.algodir = CRYP_ALGORITHM_DECRYPT;

        /* DMA does not work for DES due to a hw bug */
        if (cryp_mode == CRYP_MODE_DMA && mode_is_aes(ctx->config.algomode))
                return ablk_dma_crypt(areq);

        /* For everything except DMA, we run the non DMA version. */
        return ablk_crypt(areq);
}

struct cryp_algo_template {
        enum cryp_algo_mode algomode;
        struct skcipher_alg skcipher;
};

static int cryp_init_tfm(struct crypto_skcipher *tfm)
{
        struct cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
        struct cryp_algo_template *cryp_alg = container_of(alg,
                        struct cryp_algo_template,
                        skcipher);

        ctx->config.algomode = cryp_alg->algomode;
        ctx->blocksize = crypto_skcipher_blocksize(tfm);

        return 0;
}

static struct cryp_algo_template cryp_algs[] = {
        {
                .algomode = CRYP_ALGO_AES_ECB,
                .skcipher = {
                        .base.cra_name          = "ecb(aes)",
                        .base.cra_driver_name   = "ecb-aes-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = AES_BLOCK_SIZE,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = AES_MIN_KEY_SIZE,
                        .max_keysize            = AES_MAX_KEY_SIZE,
                        .setkey                 = aes_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .init                   = cryp_init_tfm,
                }
        },
        {
                .algomode = CRYP_ALGO_AES_CBC,
                .skcipher = {
                        .base.cra_name          = "cbc(aes)",
                        .base.cra_driver_name   = "cbc-aes-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = AES_BLOCK_SIZE,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = AES_MIN_KEY_SIZE,
                        .max_keysize            = AES_MAX_KEY_SIZE,
                        .setkey                 = aes_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .init                   = cryp_init_tfm,
                        .ivsize                 = AES_BLOCK_SIZE,
                }
        },
        {
                .algomode = CRYP_ALGO_AES_CTR,
                .skcipher = {
                        .base.cra_name          = "ctr(aes)",
                        .base.cra_driver_name   = "ctr-aes-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = 1,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = AES_MIN_KEY_SIZE,
                        .max_keysize            = AES_MAX_KEY_SIZE,
                        .setkey                 = aes_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .init                   = cryp_init_tfm,
                        .ivsize                 = AES_BLOCK_SIZE,
                        .chunksize              = AES_BLOCK_SIZE,
                }
        },
        {
                .algomode = CRYP_ALGO_DES_ECB,
                .skcipher = {
                        .base.cra_name          = "ecb(des)",
                        .base.cra_driver_name   = "ecb-des-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = DES_BLOCK_SIZE,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = DES_KEY_SIZE,
                        .max_keysize            = DES_KEY_SIZE,
                        .setkey                 = des_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .init                   = cryp_init_tfm,
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_ECB,
                .skcipher = {
                        .base.cra_name          = "ecb(des3_ede)",
                        .base.cra_driver_name   = "ecb-des3_ede-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = DES3_EDE_BLOCK_SIZE,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = DES3_EDE_KEY_SIZE,
                        .max_keysize            = DES3_EDE_KEY_SIZE,
                        .setkey                 = des3_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .init                   = cryp_init_tfm,
                }
        },
        {
                .algomode = CRYP_ALGO_DES_CBC,
                .skcipher = {
                        .base.cra_name          = "cbc(des)",
                        .base.cra_driver_name   = "cbc-des-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = DES_BLOCK_SIZE,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = DES_KEY_SIZE,
                        .max_keysize            = DES_KEY_SIZE,
                        .setkey                 = des_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .ivsize                 = DES_BLOCK_SIZE,
                        .init                   = cryp_init_tfm,
                }
        },
        {
                .algomode = CRYP_ALGO_TDES_CBC,
                .skcipher = {
                        .base.cra_name          = "cbc(des3_ede)",
                        .base.cra_driver_name   = "cbc-des3_ede-ux500",
                        .base.cra_priority      = 300,
                        .base.cra_flags         = CRYPTO_ALG_ASYNC,
                        .base.cra_blocksize     = DES3_EDE_BLOCK_SIZE,
                        .base.cra_ctxsize       = sizeof(struct cryp_ctx),
                        .base.cra_alignmask     = 3,
                        .base.cra_module        = THIS_MODULE,

                        .min_keysize            = DES3_EDE_KEY_SIZE,
                        .max_keysize            = DES3_EDE_KEY_SIZE,
                        .setkey                 = des3_skcipher_setkey,
                        .encrypt                = cryp_blk_encrypt,
                        .decrypt                = cryp_blk_decrypt,
                        .ivsize                 = DES3_EDE_BLOCK_SIZE,
                        .init                   = cryp_init_tfm,
                }
        }
};

/**
 * cryp_algs_register_all - register all supported algorithms with the
 * crypto API, rolling back already registered entries on failure.
 */
static int cryp_algs_register_all(void)
{
        int ret;
        int i;
        int count;

        pr_debug("[%s]", __func__);

        for (i = 0; i < ARRAY_SIZE(cryp_algs); i++) {
                ret = crypto_register_skcipher(&cryp_algs[i].skcipher);
                if (ret) {
                        count = i;
                        pr_err("[%s] alg registration failed",
                                        cryp_algs[i].skcipher.base.cra_driver_name);
                        goto unreg;
                }
        }
        return 0;
unreg:
        for (i = 0; i < count; i++)
                crypto_unregister_skcipher(&cryp_algs[i].skcipher);
        return ret;
}

/**
 * cryp_algs_unregister_all - unregister all previously registered algorithms
 */
static void cryp_algs_unregister_all(void)
{
        int i;

        pr_debug(DEV_DBG_NAME " [%s]", __func__);

        for (i = 0; i < ARRAY_SIZE(cryp_algs); i++)
                crypto_unregister_skcipher(&cryp_algs[i].skcipher);
}

static int ux500_cryp_probe(struct platform_device *pdev)
{
        int ret;
        struct resource *res;
        struct resource *res_irq;
        struct cryp_device_data *device_data;
        struct cryp_protection_config prot = {
                .privilege_access = CRYP_STATE_ENABLE
        };
        struct device *dev = &pdev->dev;

        dev_dbg(dev, "[%s]", __func__);
        device_data = devm_kzalloc(dev, sizeof(*device_data), GFP_ATOMIC);
        if (!device_data) {
                ret = -ENOMEM;
                goto out;
        }

        device_data->dev = dev;
        device_data->current_ctx = NULL;

        /* Grab the DMA configuration from platform data. */
        mem_to_engine = &((struct cryp_platform_data *)
                         dev->platform_data)->mem_to_engine;
        engine_to_mem = &((struct cryp_platform_data *)
                         dev->platform_data)->engine_to_mem;

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        if (!res) {
                dev_err(dev, "[%s]: platform_get_resource() failed",
                                __func__);
                ret = -ENODEV;
                goto out;
        }

        device_data->phybase = res->start;
        device_data->base = devm_ioremap_resource(dev, res);
        if (IS_ERR(device_data->base)) {
                dev_err(dev, "[%s]: ioremap failed!", __func__);
                ret = PTR_ERR(device_data->base);
                goto out;
        }

        spin_lock_init(&device_data->ctx_lock);
        spin_lock_init(&device_data->power_state_spinlock);

        /* Enable power for CRYP hardware block */
        device_data->pwr_regulator = regulator_get(&pdev->dev, "v-ape");
        if (IS_ERR(device_data->pwr_regulator)) {
                dev_err(dev, "[%s]: could not get cryp regulator", __func__);
                ret = PTR_ERR(device_data->pwr_regulator);
                device_data->pwr_regulator = NULL;
                goto out;
        }

        /* Enable the clk for CRYP hardware block */
        device_data->clk = devm_clk_get(&pdev->dev, NULL);
        if (IS_ERR(device_data->clk)) {
                dev_err(dev, "[%s]: clk_get() failed!", __func__);
                ret = PTR_ERR(device_data->clk);
                goto out_regulator;
        }

        ret = clk_prepare(device_data->clk);
        if (ret) {
                dev_err(dev, "[%s]: clk_prepare() failed!", __func__);
                goto out_regulator;
        }

        /* Enable device power (and clock) */
        ret = cryp_enable_power(device_data->dev, device_data, false);
        if (ret) {
                dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
                goto out_clk_unprepare;
        }

        if (cryp_check(device_data)) {
                dev_err(dev, "[%s]: cryp_check() failed!", __func__);
                ret = -EINVAL;
                goto out_power;
        }

        if (cryp_configure_protection(device_data, &prot)) {
                dev_err(dev, "[%s]: cryp_configure_protection() failed!",
                        __func__);
                ret = -EINVAL;
                goto out_power;
        }

        res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
        if (!res_irq) {
                dev_err(dev, "[%s]: IORESOURCE_IRQ unavailable",
                        __func__);
                ret = -ENODEV;
                goto out_power;
        }

        ret = devm_request_irq(&pdev->dev, res_irq->start,
                               cryp_interrupt_handler, 0, "cryp1", device_data);
        if (ret) {
                dev_err(dev, "[%s]: Unable to request IRQ", __func__);
                goto out_power;
        }

        if (cryp_mode == CRYP_MODE_DMA)
                cryp_dma_setup_channel(device_data, dev);

        platform_set_drvdata(pdev, device_data);

        /* Put the new device into the device list... */
        klist_add_tail(&device_data->list_node, &driver_data.device_list);

        /* ... and signal that a new device is available. */
        up(&driver_data.device_allocation);

        atomic_set(&session_id, 1);

        ret = cryp_algs_register_all();
        if (ret) {
                dev_err(dev, "[%s]: cryp_algs_register_all() failed!",
                        __func__);
                goto out_power;
        }

        dev_info(dev, "successfully registered\n");

        return 0;

out_power:
        cryp_disable_power(device_data->dev, device_data, false);

out_clk_unprepare:
        clk_unprepare(device_data->clk);

out_regulator:
        regulator_put(device_data->pwr_regulator);

out:
        return ret;
}

static int ux500_cryp_remove(struct platform_device *pdev)
{
        struct cryp_device_data *device_data;

        dev_dbg(&pdev->dev, "[%s]", __func__);
        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
                        __func__);
                return -ENOMEM;
        }

        /* Try to decrease the number of available devices. */
        if (down_trylock(&driver_data.device_allocation))
                return -EBUSY;

        /* Check that the device is free */
        spin_lock(&device_data->ctx_lock);
        /* current_ctx allocates a device, NULL = unallocated */
        if (device_data->current_ctx) {
                /* The device is busy */
                spin_unlock(&device_data->ctx_lock);
                /* Return the device to the pool. */
                up(&driver_data.device_allocation);
                return -EBUSY;
        }

        spin_unlock(&device_data->ctx_lock);

        /* Remove the device from the list */
        if (klist_node_attached(&device_data->list_node))
                klist_remove(&device_data->list_node);

        /* If this was the last device, remove the services */
        if (list_empty(&driver_data.device_list.k_list))
                cryp_algs_unregister_all();

        if (cryp_disable_power(&pdev->dev, device_data, false))
                dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
                        __func__);

        clk_unprepare(device_data->clk);
        regulator_put(device_data->pwr_regulator);

        return 0;
}

static void ux500_cryp_shutdown(struct platform_device *pdev)
{
        struct cryp_device_data *device_data;

        dev_dbg(&pdev->dev, "[%s]", __func__);

        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(&pdev->dev, "[%s]: platform_get_drvdata() failed!",
                        __func__);
                return;
        }

        /* Check that the device is free */
        spin_lock(&device_data->ctx_lock);
        /* current_ctx allocates a device, NULL = unallocated */
        if (!device_data->current_ctx) {
                if (down_trylock(&driver_data.device_allocation))
                        dev_dbg(&pdev->dev, "[%s]: Cryp still in use! "
                                "Shutting down anyway...", __func__);
                /*
                 * (Allocate the device)
                 * current_ctx must be set to a non-NULL (dummy) value here
                 * so the device is not handed out again while we shut down.
                 */
                device_data->current_ctx++;
        }
        spin_unlock(&device_data->ctx_lock);

        /* Remove the device from the list */
        if (klist_node_attached(&device_data->list_node))
                klist_remove(&device_data->list_node);

        /* If this was the last device, remove the services */
        if (list_empty(&driver_data.device_list.k_list))
                cryp_algs_unregister_all();

        if (cryp_disable_power(&pdev->dev, device_data, false))
                dev_err(&pdev->dev, "[%s]: cryp_disable_power() failed",
                        __func__);
}

#ifdef CONFIG_PM_SLEEP
static int ux500_cryp_suspend(struct device *dev)
{
        int ret;
        struct platform_device *pdev = to_platform_device(dev);
        struct cryp_device_data *device_data;
        struct resource *res_irq;
        struct cryp_ctx *temp_ctx = NULL;

        dev_dbg(dev, "[%s]", __func__);

        /* Handle state? */
        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
                return -ENOMEM;
        }

        res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
        if (!res_irq)
                dev_err(dev, "[%s]: IORESOURCE_IRQ, unavailable", __func__);
        else
                disable_irq(res_irq->start);

        spin_lock(&device_data->ctx_lock);
        if (!device_data->current_ctx)
                device_data->current_ctx++;
        spin_unlock(&device_data->ctx_lock);
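
        /*
         * temp_ctx is NULL, so ++temp_ctx yields the same dummy non-NULL
         * pointer value that current_ctx++ produced above for an idle
         * device; a match therefore means no real context owns the device.
         */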

        if (device_data->current_ctx == ++temp_ctx) {
                if (down_interruptible(&driver_data.device_allocation))
                        dev_dbg(dev, "[%s]: down_interruptible() failed",
                                __func__);
                ret = cryp_disable_power(dev, device_data, false);
        } else {
                ret = cryp_disable_power(dev, device_data, true);
        }

        if (ret)
                dev_err(dev, "[%s]: cryp_disable_power() failed", __func__);

        return ret;
}

static int ux500_cryp_resume(struct device *dev)
{
        int ret = 0;
        struct platform_device *pdev = to_platform_device(dev);
        struct cryp_device_data *device_data;
        struct resource *res_irq;
        struct cryp_ctx *temp_ctx = NULL;

        dev_dbg(dev, "[%s]", __func__);

        device_data = platform_get_drvdata(pdev);
        if (!device_data) {
                dev_err(dev, "[%s]: platform_get_drvdata() failed!", __func__);
                return -ENOMEM;
        }
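
        /*
         * As in suspend: ++temp_ctx recreates the dummy non-NULL value, so
         * this detects a device that was idle when it was suspended.
         */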

        spin_lock(&device_data->ctx_lock);
        if (device_data->current_ctx == ++temp_ctx)
                device_data->current_ctx = NULL;
        spin_unlock(&device_data->ctx_lock);

        if (!device_data->current_ctx)
                up(&driver_data.device_allocation);
        else
                ret = cryp_enable_power(dev, device_data, true);

        if (ret) {
                dev_err(dev, "[%s]: cryp_enable_power() failed!", __func__);
        } else {
                res_irq = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
                if (res_irq)
                        enable_irq(res_irq->start);
        }

        return ret;
}
#endif

static SIMPLE_DEV_PM_OPS(ux500_cryp_pm, ux500_cryp_suspend, ux500_cryp_resume);

static const struct of_device_id ux500_cryp_match[] = {
        { .compatible = "stericsson,ux500-cryp" },
        { },
};
MODULE_DEVICE_TABLE(of, ux500_cryp_match);

static struct platform_driver cryp_driver = {
        .probe  = ux500_cryp_probe,
        .remove = ux500_cryp_remove,
        .shutdown = ux500_cryp_shutdown,
        .driver = {
                .name  = "cryp1",
                .of_match_table = ux500_cryp_match,
                .pm    = &ux500_cryp_pm,
        }
};

static int __init ux500_cryp_mod_init(void)
{
        pr_debug("[%s] is called!", __func__);
        klist_init(&driver_data.device_list, NULL, NULL);
        /* Initialize the semaphore to 0 devices (locked state) */
        sema_init(&driver_data.device_allocation, 0);
        return platform_driver_register(&cryp_driver);
}

static void __exit ux500_cryp_mod_fini(void)
{
        pr_debug("[%s] is called!", __func__);
        platform_driver_unregister(&cryp_driver);
}

module_init(ux500_cryp_mod_init);
module_exit(ux500_cryp_mod_fini);

module_param(cryp_mode, int, 0);
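/*
 * 0 = polling, 1 = interrupt, 2 = DMA, assuming the enum cryp_mode ordering
 * implied by the "cryp_mode == 2" note in hw_crypt_noxts() above.
 */
MODULE_PARM_DESC(cryp_mode, "CRYP operation mode: polling, interrupt or DMA");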

MODULE_DESCRIPTION("Driver for ST-Ericsson UX500 CRYP crypto engine.");
MODULE_ALIAS_CRYPTO("aes-all");
MODULE_ALIAS_CRYPTO("des-all");

MODULE_LICENSE("GPL");