linux/drivers/crypto/stm32/stm32-cryp.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA       BIT(16)

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY  50

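/*
 * struct stm32_cryp_caps - hardware-revision quirks
 * @swap_final: the final GCM size words must be byte-swapped before they
 *              are written to the data input register.
 * @padding_wa: the peripheral cannot pad a partial last block by itself,
 *              so the driver must run the datasheet workaround (see
 *              stm32_cryp_irq_write_{gcm,ccm}_padded_data()).
 */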
struct stm32_cryp_caps {
        bool                    swap_final;
        bool                    padding_wa;
};

struct stm32_cryp_ctx {
        struct crypto_engine_ctx enginectx;
        struct stm32_cryp       *cryp;
        int                     keylen;
        u32                     key[AES_KEYSIZE_256 / sizeof(u32)];
        unsigned long           flags;
};

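/*
 * struct stm32_cryp - per-instance state. A given instance handles one
 * request at a time: either a skcipher request (req) or an AEAD request
 * (areq). The scatterlist walks and the remaining input/output byte
 * counts below track the progress of that single request.
 */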
struct stm32_cryp_reqctx {
        unsigned long mode;
};

struct stm32_cryp {
        struct list_head        list;
        struct device           *dev;
        void __iomem            *regs;
        struct clk              *clk;
        unsigned long           flags;
        u32                     irq_status;
        const struct stm32_cryp_caps *caps;
        struct stm32_cryp_ctx   *ctx;

        struct crypto_engine    *engine;

        struct skcipher_request *req;
        struct aead_request     *areq;

        size_t                  authsize;
        size_t                  hw_blocksize;

        size_t                  total_in;
        size_t                  total_in_save;
        size_t                  total_out;
        size_t                  total_out_save;

        struct scatterlist      *in_sg;
        struct scatterlist      *out_sg;
        struct scatterlist      *out_sg_save;

        struct scatterlist      in_sgl;
        struct scatterlist      out_sgl;
        bool                    sgs_copied;

        int                     in_sg_len;
        int                     out_sg_len;

        struct scatter_walk     in_walk;
        struct scatter_walk     out_walk;

        u32                     last_ctr[4];
        u32                     gcm_ctr;
};

struct stm32_cryp_list {
        struct list_head        dev_list;
        spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
        .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
        .lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
        return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
        return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
        writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        !(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
                        !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

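/*
 * Bind a tfm context to a CRYP instance: the first request picks the
 * first device on the list and the context sticks to it afterwards, so
 * every request of a given tfm runs on the same instance.
 */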
static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
        struct stm32_cryp *tmp, *cryp = NULL;

        spin_lock_bh(&cryp_list.lock);
        if (!ctx->cryp) {
                list_for_each_entry(tmp, &cryp_list.dev_list, list) {
                        cryp = tmp;
                        break;
                }
                ctx->cryp = cryp;
        } else {
                cryp = ctx->cryp;
        }

        spin_unlock_bh(&cryp_list.lock);

        return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
                                    size_t align)
{
        int len = 0;

        if (!total)
                return 0;

        if (!IS_ALIGNED(total, align))
                return -EINVAL;

        while (sg) {
                if (!IS_ALIGNED(sg->offset, sizeof(u32)))
                        return -EINVAL;

                if (!IS_ALIGNED(sg->length, align))
                        return -EINVAL;

                len += sg->length;
                sg = sg_next(sg);
        }

        if (len != total)
                return -EINVAL;

        return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
        int ret;

        ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
                                       cryp->hw_blocksize);
        if (ret)
                return ret;

        ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
                                       cryp->hw_blocksize);

        return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
                        unsigned int start, unsigned int nbytes, int out)
{
        struct scatter_walk walk;

        if (!nbytes)
                return;

        scatterwalk_start(&walk, sg);
        scatterwalk_advance(&walk, start);
        scatterwalk_copychunks(buf, &walk, nbytes, out);
        scatterwalk_done(&walk, out, 0);
}

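/*
 * The hardware consumes and produces full 32-bit words of full blocks.
 * If the request's scatterlists are not suitably aligned, bounce the
 * data through freshly allocated contiguous pages instead: input is
 * copied in here, output is copied back in stm32_cryp_finish_req().
 */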
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
        void *buf_in, *buf_out;
        int pages, total_in, total_out;

        if (!stm32_cryp_check_io_aligned(cryp)) {
                cryp->sgs_copied = 0;
                return 0;
        }

        total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
        pages = total_in ? get_order(total_in) : 1;
        buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

        total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
        pages = total_out ? get_order(total_out) : 1;
        buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

        if (!buf_in || !buf_out) {
                dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
                cryp->sgs_copied = 0;
                return -EFAULT;
        }

        sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

        sg_init_one(&cryp->in_sgl, buf_in, total_in);
        cryp->in_sg = &cryp->in_sgl;
        cryp->in_sg_len = 1;

        sg_init_one(&cryp->out_sgl, buf_out, total_out);
        cryp->out_sg_save = cryp->out_sg;
        cryp->out_sg = &cryp->out_sgl;
        cryp->out_sg_len = 1;

        cryp->sgs_copied = 1;

        return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
        if (!iv)
                return;

        stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
        stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

        if (is_aes(cryp)) {
                stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
                stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
        }
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
        struct skcipher_request *req = cryp->req;
        u32 *tmp = (void *)req->iv;

        if (!tmp)
                return;

        *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
        *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

        if (is_aes(cryp)) {
                *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
                *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
        }
}

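/*
 * Key registers are loaded right-aligned: the last key word goes into
 * K3RR and writing proceeds backwards towards K0LR, so shorter keys
 * (DES, AES-128/192) leave the upper key registers untouched.
 */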
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
        unsigned int i;
        int r_id;

        if (is_des(c)) {
                stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
                stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
        } else {
                r_id = CRYP_K3RR;
                for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
                        stm32_cryp_write(c, r_id,
                                         cpu_to_be32(c->ctx->key[i - 1]));
        }
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
        if (is_aes(cryp) && is_ecb(cryp))
                return CR_AES_ECB;

        if (is_aes(cryp) && is_cbc(cryp))
                return CR_AES_CBC;

        if (is_aes(cryp) && is_ctr(cryp))
                return CR_AES_CTR;

        if (is_aes(cryp) && is_gcm(cryp))
                return CR_AES_GCM;

        if (is_aes(cryp) && is_ccm(cryp))
                return CR_AES_CCM;

        if (is_des(cryp) && is_ecb(cryp))
                return CR_DES_ECB;

        if (is_des(cryp) && is_cbc(cryp))
                return CR_DES_CBC;

        if (is_tdes(cryp) && is_ecb(cryp))
                return CR_TDES_ECB;

        if (is_tdes(cryp) && is_cbc(cryp))
                return CR_TDES_CBC;

        dev_err(cryp->dev, "Unknown mode\n");
        return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
        return is_encrypt(cryp) ? cryp->areq->cryptlen :
                                  cryp->areq->cryptlen - cryp->authsize;
}

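/*
 * The GCM hardware sequence has four phases: init (hash subkey
 * computation), header (additional authenticated data), payload and
 * final (tag). Phase 1 below programs the 96-bit IV followed by the
 * 32-bit counter, seeded with GCM_CTR_INIT, the first counter value
 * used for the payload.
 */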
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u32 iv[4];

        /* Phase 1 : init */
        memcpy(iv, cryp->areq->iv, 12);
        iv[3] = cpu_to_be32(GCM_CTR_INIT);
        cryp->gcm_ctr = GCM_CTR_INIT;
        stm32_cryp_hw_write_iv(cryp, iv);

        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret)
                dev_err(cryp->dev, "Timeout (gcm init)\n");

        return ret;
}

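/*
 * CCM first block (B0), per RFC 3610: a flags byte (adata bit and
 * encoded tag length), the nonce, and the message length in the
 * trailing bytes. The counter value programmed as IV starts at 1;
 * CTR0, the same block with a zero counter, is replayed later when
 * the tag is read in stm32_cryp_read_auth_tag().
 */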
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
        int ret;
        u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
        u32 *d;
        unsigned int i, textlen;

        /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
        memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
        memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
        iv[AES_BLOCK_SIZE - 1] = 1;
        stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

        /* Build B0 */
        memcpy(b0, iv, AES_BLOCK_SIZE);

        b0[0] |= (8 * ((cryp->authsize - 2) / 2));

        if (cryp->areq->assoclen)
                b0[0] |= 0x40;

        textlen = stm32_cryp_get_input_text_len(cryp);

        b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
        b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

        /* Enable HW */
        stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

        /* Write B0 */
        d = (u32 *)b0;

        for (i = 0; i < AES_BLOCK_32; i++) {
                if (!cryp->caps->padding_wa)
                        *d = cpu_to_be32(*d);
                stm32_cryp_write(cryp, CRYP_DIN, *d++);
        }

        /* Wait for end of processing */
        ret = stm32_cryp_wait_enable(cryp);
        if (ret)
                dev_err(cryp->dev, "Timeout (ccm init)\n");

        return ret;
}

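/*
 * Program the peripheral for the current request: key, data swap mode
 * and key size, then the algorithm/mode bits. AES ECB/CBC decryption
 * needs a key-preparation run first, and GCM/CCM start with their init
 * phase before any data is fed in.
 */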
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
        int ret;
        u32 cfg, hw_mode;

        pm_runtime_get_sync(cryp->dev);

        /* Disable interrupt */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        /* Set key */
        stm32_cryp_hw_write_key(cryp);

        /* Set configuration */
        cfg = CR_DATA8 | CR_FFLUSH;

        switch (cryp->ctx->keylen) {
        case AES_KEYSIZE_128:
                cfg |= CR_KEY128;
                break;

        case AES_KEYSIZE_192:
                cfg |= CR_KEY192;
                break;

        default:
        case AES_KEYSIZE_256:
                cfg |= CR_KEY256;
                break;
        }

        hw_mode = stm32_cryp_get_hw_mode(cryp);
        if (hw_mode == CR_AES_UNKNOWN)
                return -EINVAL;

        /* AES ECB/CBC decrypt: run key preparation first */
        if (is_decrypt(cryp) &&
            ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
                stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

                /* Wait for end of processing */
                ret = stm32_cryp_wait_busy(cryp);
                if (ret) {
                        dev_err(cryp->dev, "Timeout (key preparation)\n");
                        return ret;
                }
        }

        cfg |= hw_mode;

        if (is_decrypt(cryp))
                cfg |= CR_DEC_NOT_ENC;

        /* Apply config and flush (valid when CRYPEN = 0) */
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        switch (hw_mode) {
        case CR_AES_GCM:
        case CR_AES_CCM:
                /* Phase 1 : init */
                if (hw_mode == CR_AES_CCM)
                        ret = stm32_cryp_ccm_init(cryp, cfg);
                else
                        ret = stm32_cryp_gcm_init(cryp, cfg);

                if (ret)
                        return ret;

                /* Phase 2 : header (authenticated data) */
                if (cryp->areq->assoclen) {
                        cfg |= CR_PH_HEADER;
                } else if (stm32_cryp_get_input_text_len(cryp)) {
                        cfg |= CR_PH_PAYLOAD;
                        stm32_cryp_write(cryp, CRYP_CR, cfg);
                } else {
                        cfg |= CR_PH_INIT;
                }

                break;

        case CR_DES_CBC:
        case CR_TDES_CBC:
        case CR_AES_CBC:
        case CR_AES_CTR:
                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->iv);
                break;

        default:
                break;
        }

        /* Enable now */
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        cryp->flags &= ~FLG_CCM_PADDED_WA;

        return 0;
}

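/*
 * Request completion: read or check the authentication tag for AEAD
 * modes, save the updated IV for chained skcipher modes, copy bounced
 * output back to the caller's scatterlist if it was unaligned, then
 * release the engine and wipe the key.
 */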
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
        if (!err && (is_gcm(cryp) || is_ccm(cryp)))
                /* Phase 4 : output tag */
                err = stm32_cryp_read_auth_tag(cryp);

        if (!err && (!(is_gcm(cryp) || is_ccm(cryp))))
                stm32_cryp_get_iv(cryp);

        if (cryp->sgs_copied) {
                void *buf_in, *buf_out;
                int pages, len;

                buf_in = sg_virt(&cryp->in_sgl);
                buf_out = sg_virt(&cryp->out_sgl);

                sg_copy_buf(buf_out, cryp->out_sg_save, 0,
                            cryp->total_out_save, 1);

                len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_in, pages);

                len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_out, pages);
        }

        pm_runtime_mark_last_busy(cryp->dev);
        pm_runtime_put_autosuspend(cryp->dev);

        if (is_gcm(cryp) || is_ccm(cryp))
                crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
        else
                crypto_finalize_skcipher_request(cryp->engine, cryp->req,
                                                   err);

        memset(cryp->ctx->key, 0, cryp->ctx->keylen);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
        /* Enable interrupt and let the IRQ handler do everything */
        stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

        return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

        ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
        ctx->enginectx.op.unprepare_request = NULL;
        return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
                                       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

        ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
        ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
        ctx->enginectx.op.unprepare_request = NULL;

        return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
                        crypto_skcipher_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;
        else
                return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        return verify_skcipher_des_key(tfm, key) ?:
               stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
                                  unsigned int keylen)
{
        return verify_skcipher_des3_key(tfm, key) ?:
               stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
                                      unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
                                          unsigned int authsize)
{
        switch (authsize) {
        case 4:
        case 6:
        case 8:
        case 10:
        case 12:
        case 14:
        case 16:
                break;
        default:
                return -EINVAL;
        }

        return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
        return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

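/*
 * The thin wrappers above only tag the request with the algo/mode flags
 * and queue it on the crypto engine. For reference, a minimal kernel
 * user of one of these transforms could look like the (illustrative,
 * untested) sketch below; names such as key/src_sg/dst_sg/len/iv are
 * placeholders, not symbols of this driver:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * The crypto core dispatches to whichever "cbc(aes)" implementation has
 * the highest priority, which may or may not be this driver.
 */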
static int stm32_cryp_prepare_req(struct skcipher_request *req,
                                  struct aead_request *areq)
{
        struct stm32_cryp_ctx *ctx;
        struct stm32_cryp *cryp;
        struct stm32_cryp_reqctx *rctx;
        int ret;

        if (!req && !areq)
                return -EINVAL;

        ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
                    crypto_aead_ctx(crypto_aead_reqtfm(areq));

        cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
        rctx->mode &= FLG_MODE_MASK;

        ctx->cryp = cryp;

        cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
        cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
        cryp->ctx = ctx;

        if (req) {
                cryp->req = req;
                cryp->areq = NULL;
                cryp->total_in = req->cryptlen;
                cryp->total_out = cryp->total_in;
        } else {
                /*
                 * Length of input and output data:
                 * Encryption case:
                 *  INPUT  =   AssocData  ||   PlainText
                 *          <- assoclen ->  <- cryptlen ->
                 *          <------- total_in ----------->
                 *
                 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
                 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
                 *          <---------------- total_out ----------------->
                 *
                 * Decryption case:
                 *  INPUT  =   AssocData  ||  CipherText  ||  AuthTag
                 *          <- assoclen ->  <--------- cryptlen --------->
                 *                                          <- authsize ->
                 *          <---------------- total_in ------------------>
                 *
                 *  OUTPUT =   AssocData  ||   PlainText
                 *          <- assoclen ->  <- cryptlen - authsize ->
                 *          <---------- total_out ----------------->
                 */
                cryp->areq = areq;
                cryp->req = NULL;
                cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
                cryp->total_in = areq->assoclen + areq->cryptlen;
                if (is_encrypt(cryp))
                        /* Append auth tag to output */
                        cryp->total_out = cryp->total_in + cryp->authsize;
                else
                        /* No auth tag in output */
                        cryp->total_out = cryp->total_in - cryp->authsize;
        }

        cryp->total_in_save = cryp->total_in;
        cryp->total_out_save = cryp->total_out;

        cryp->in_sg = req ? req->src : areq->src;
        cryp->out_sg = req ? req->dst : areq->dst;
        cryp->out_sg_save = cryp->out_sg;

        cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
        if (cryp->in_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get in_sg_len\n");
                ret = cryp->in_sg_len;
                return ret;
        }

        cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
        if (cryp->out_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get out_sg_len\n");
                ret = cryp->out_sg_len;
                return ret;
        }

        ret = stm32_cryp_copy_sgs(cryp);
        if (ret)
                return ret;

        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
        scatterwalk_start(&cryp->out_walk, cryp->out_sg);

        if (is_gcm(cryp) || is_ccm(cryp)) {
                /* In output, jump after assoc data */
                scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
                cryp->total_out -= cryp->areq->assoclen;
        }

        ret = stm32_cryp_hw_init(cryp);
        return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         void *areq)
{
        struct skcipher_request *req = container_of(areq,
                                                      struct skcipher_request,
                                                      base);

        return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
        struct skcipher_request *req = container_of(areq,
                                                      struct skcipher_request,
                                                      base);
        struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
                        crypto_skcipher_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);

        return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
        struct aead_request *req = container_of(areq, struct aead_request,
                                                base);
        struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        if (unlikely(!cryp->areq->assoclen &&
                     !stm32_cryp_get_input_text_len(cryp))) {
                /* No input data to process: get tag and finish */
                stm32_cryp_finish_req(cryp, 0);
                return 0;
        }

        return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
                                unsigned int n)
{
        scatterwalk_advance(&cryp->out_walk, n);

        if (unlikely(cryp->out_sg->length == _walked_out)) {
                cryp->out_sg = sg_next(cryp->out_sg);
                if (cryp->out_sg) {
                        scatterwalk_start(&cryp->out_walk, cryp->out_sg);
                        return (sg_virt(cryp->out_sg) + _walked_out);
                }
        }

        return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
                               unsigned int n)
{
        scatterwalk_advance(&cryp->in_walk, n);

        if (unlikely(cryp->in_sg->length == _walked_in)) {
                cryp->in_sg = sg_next(cryp->in_sg);
                if (cryp->in_sg) {
                        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
                        return (sg_virt(cryp->in_sg) + _walked_in);
                }
        }

        return (u32 *)((u8 *)src + n);
}

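/*
 * Phase 4 (final): for GCM the hardware is fed the bit lengths of the
 * associated data and of the payload as two 64-bit values; for CCM it
 * is fed CTR0, i.e. the counter block with its counter field cleared.
 * The resulting tag is then read out (encryption) or compared against
 * the tag found at the end of the input (decryption).
 */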
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
        u32 cfg, size_bit, *dst, d32;
        u8 *d8;
        unsigned int i, j;
        int ret = 0;

        /* Update Config */
        cfg = stm32_cryp_read(cryp, CRYP_CR);

        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        cfg &= ~CR_DEC_NOT_ENC;
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        if (is_gcm(cryp)) {
                /* GCM: write aad and payload size (in bits) */
                size_bit = cryp->areq->assoclen * 8;
                if (cryp->caps->swap_final)
                        size_bit = cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);

                size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
                                cryp->areq->cryptlen - AES_BLOCK_SIZE;
                size_bit *= 8;
                if (cryp->caps->swap_final)
                        size_bit = cpu_to_be32(size_bit);

                stm32_cryp_write(cryp, CRYP_DIN, 0);
                stm32_cryp_write(cryp, CRYP_DIN, size_bit);
        } else {
                /* CCM: write CTR0 */
                u8 iv[AES_BLOCK_SIZE];
                u32 *iv32 = (u32 *)iv;

                memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
                memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

                for (i = 0; i < AES_BLOCK_32; i++) {
                        if (!cryp->caps->padding_wa)
                                *iv32 = cpu_to_be32(*iv32);
                        stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
                }
        }

        /* Wait for output data */
        ret = stm32_cryp_wait_output(cryp);
        if (ret) {
                dev_err(cryp->dev, "Timeout (read tag)\n");
                return ret;
        }

        if (is_encrypt(cryp)) {
                /* Get and write tag */
                dst = sg_virt(cryp->out_sg) + _walked_out;

                for (i = 0; i < AES_BLOCK_32; i++) {
                        if (cryp->total_out >= sizeof(u32)) {
                                /* Read a full u32 */
                                *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                                dst = stm32_cryp_next_out(cryp, dst,
                                                          sizeof(u32));
                                cryp->total_out -= sizeof(u32);
                        } else if (!cryp->total_out) {
                                /* Empty fifo out (data from input padding) */
                                stm32_cryp_read(cryp, CRYP_DOUT);
                        } else {
                                /* Read less than a u32 */
                                d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                                d8 = (u8 *)&d32;

                                for (j = 0; j < cryp->total_out; j++) {
                                        *((u8 *)dst) = *(d8++);
                                        dst = stm32_cryp_next_out(cryp, dst, 1);
                                }
                                cryp->total_out = 0;
                        }
                }
        } else {
                /* Get and check tag */
                u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

                scatterwalk_map_and_copy(in_tag, cryp->in_sg,
                                         cryp->total_in_save - cryp->authsize,
                                         cryp->authsize, 0);

                for (i = 0; i < AES_BLOCK_32; i++)
                        out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

                if (crypto_memneq(in_tag, out_tag, cryp->authsize))
                        ret = -EBADMSG;
        }

        /* Disable cryp */
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        return ret;
}

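/*
 * The hardware only increments the low 32 bits of the CTR counter: when
 * the last used value shows the low word is about to wrap, propagate
 * the carry into the upper words manually and rewrite the whole IV with
 * CRYPEN temporarily cleared.
 */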
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
        u32 cr;

        if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
                cryp->last_ctr[3] = 0;
                cryp->last_ctr[2]++;
                if (!cryp->last_ctr[2]) {
                        cryp->last_ctr[1]++;
                        if (!cryp->last_ctr[1])
                                cryp->last_ctr[0]++;
                }

                cr = stm32_cryp_read(cryp, CRYP_CR);
                stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

                stm32_cryp_write(cryp, CRYP_CR, cr);
        }

        cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
        cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
        cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
        cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 d32, *dst;
        u8 *d8;
        size_t tag_size;

        /* Do not read the tag now (if any) */
        if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
                tag_size = cryp->authsize;
        else
                tag_size = 0;

        dst = sg_virt(cryp->out_sg) + _walked_out;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
                        /* Read a full u32 */
                        *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                        dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
                        cryp->total_out -= sizeof(u32);
                } else if (cryp->total_out == tag_size) {
                        /* Empty fifo out (data from input padding) */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                } else {
                        /* Read less than a u32 */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                        d8 = (u8 *)&d32;

                        for (j = 0; j < cryp->total_out - tag_size; j++) {
                                *((u8 *)dst) = *(d8++);
                                dst = stm32_cryp_next_out(cryp, dst, 1);
                        }
                        cryp->total_out = tag_size;
                }
        }

        return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 *src;
        u8 d8[4];
        size_t tag_size;

        /* Do not write the tag (if any) */
        if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
                tag_size = cryp->authsize;
        else
                tag_size = 0;

        src = sg_virt(cryp->in_sg) + _walked_in;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
                        /* Write a full u32 */
                        stm32_cryp_write(cryp, CRYP_DIN, *src);

                        src = stm32_cryp_next_in(cryp, src, sizeof(u32));
                        cryp->total_in -= sizeof(u32);
                } else if (cryp->total_in == tag_size) {
                        /* Write padding data */
                        stm32_cryp_write(cryp, CRYP_DIN, 0);
                } else {
                        /* Write less than a u32 */
                        memset(d8, 0, sizeof(u32));
                        for (j = 0; j < cryp->total_in - tag_size; j++) {
                                d8[j] = *((u8 *)src);
                                src = stm32_cryp_next_in(cryp, src, 1);
                        }

                        stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
                        cryp->total_in = tag_size;
                }
        }
}

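/*
 * GCM payload writes must be full AES blocks. When the last block is
 * partial on a revision without NPBLB support, the datasheet's 'special
 * workaround' below encrypts that block in plain CTR mode with the
 * matching counter value, then replays the result, zero-padded, through
 * the GCM final phase so that the tag computation stays correct.
 */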
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
        int err;
        u32 cfg, tmp[AES_BLOCK_32];
        size_t total_in_ori = cryp->total_in;
        struct scatterlist *out_sg_ori = cryp->out_sg;
        unsigned int i;

        /* 'Special workaround' procedure described in the datasheet */

        /* a) disable ip */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);
        cfg = stm32_cryp_read(cryp, CRYP_CR);
        cfg &= ~CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) Update IV1R */
        stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

        /* c) change mode to CTR */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_CTR;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* a) enable IP */
        cfg |= CR_CRYPEN;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* b) pad and write the last block */
        stm32_cryp_irq_write_block(cryp);
        cryp->total_in = total_in_ori;
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        /* c) get and store encrypted data */
        stm32_cryp_irq_read_data(cryp);
        scatterwalk_map_and_copy(tmp, out_sg_ori,
                                 cryp->total_in_save - total_in_ori,
                                 total_in_ori, 0);

        /* d) change mode back to AES GCM */
        cfg &= ~CR_ALGO_MASK;
        cfg |= CR_AES_GCM;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* e) change phase to Final */
        cfg &= ~CR_PH_MASK;
        cfg |= CR_PH_FINAL;
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        /* f) write padded data */
        for (i = 0; i < AES_BLOCK_32; i++) {
                if (cryp->total_in)
                        stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
                else
                        stm32_cryp_write(cryp, CRYP_DIN, 0);

                cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
        }

        /* g) Empty fifo out */
        err = stm32_cryp_wait_output(cryp);
        if (err) {
                dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
                return stm32_cryp_finish_req(cryp, err);
        }

        for (i = 0; i < AES_BLOCK_32; i++)
                stm32_cryp_read(cryp, CRYP_DOUT);

        /* h) run the normal Final phase */
        stm32_cryp_finish_req(cryp, 0);
}

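/*
 * Newer revisions pad the last block in hardware: NPBLB holds the
 * number of padding bytes in the last payload block and must be
 * programmed while the peripheral is disabled.
 */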
1391static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
1392{
1393        u32 cfg, payload_bytes;
1394
1395        /* disable ip, set NPBLB and reneable ip */
1396        cfg = stm32_cryp_read(cryp, CRYP_CR);
1397        cfg &= ~CR_CRYPEN;
1398        stm32_cryp_write(cryp, CRYP_CR, cfg);
1399
1400        payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
1401                                           cryp->total_in;
1402        cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
1403        cfg |= CR_CRYPEN;
1404        stm32_cryp_write(cryp, CRYP_CR, cfg);
1405}
1406
1407static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
1408{
1409        int err = 0;
1410        u32 cfg, iv1tmp;
1411        u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
1412        size_t last_total_out, total_in_ori = cryp->total_in;
1413        struct scatterlist *out_sg_ori = cryp->out_sg;
1414        unsigned int i;
1415
1416        /* 'Special workaround' procedure described in the datasheet */
1417        cryp->flags |= FLG_CCM_PADDED_WA;
1418
1419        /* a) disable ip */
1420        stm32_cryp_write(cryp, CRYP_IMSCR, 0);
1421
1422        cfg = stm32_cryp_read(cryp, CRYP_CR);
1423        cfg &= ~CR_CRYPEN;
1424        stm32_cryp_write(cryp, CRYP_CR, cfg);
1425
1426        /* b) get IV1 from CRYP_CSGCMCCM7 */
1427        iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);
1428
1429        /* c) Load CRYP_CSGCMCCMxR */
1430        for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
1431                cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
1432
1433        /* d) Write IV1R */
1434        stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);
1435
1436        /* e) change mode to CTR */
1437        cfg &= ~CR_ALGO_MASK;
1438        cfg |= CR_AES_CTR;
1439        stm32_cryp_write(cryp, CRYP_CR, cfg);
1440
1441        /* a) enable IP */
1442        cfg |= CR_CRYPEN;
1443        stm32_cryp_write(cryp, CRYP_CR, cfg);
1444
1445        /* b) pad and write the last block */
1446        stm32_cryp_irq_write_block(cryp);
1447        cryp->total_in = total_in_ori;
1448        err = stm32_cryp_wait_output(cryp);
1449        if (err) {
1450                dev_err(cryp->dev, "Timeout (wite ccm padded data)\n");
1451                return stm32_cryp_finish_req(cryp, err);
1452        }
1453
1454        /* c) get and store decrypted data */
1455        last_total_out = cryp->total_out;
1456        stm32_cryp_irq_read_data(cryp);
1457
1458        memset(tmp, 0, sizeof(tmp));
1459        scatterwalk_map_and_copy(tmp, out_sg_ori,
1460                                 cryp->total_out_save - last_total_out,
1461                                 last_total_out, 0);
1462
1463        /* d) Load again CRYP_CSGCMCCMxR */
1464        for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
1465                cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);
1466
1467        /* e) change mode back to AES CCM */
1468        cfg &= ~CR_ALGO_MASK;
1469        cfg |= CR_AES_CCM;
1470        stm32_cryp_write(cryp, CRYP_CR, cfg);
1471
1472        /* f) change phase to header */
1473        cfg &= ~CR_PH_MASK;
1474        cfg |= CR_PH_HEADER;
1475        stm32_cryp_write(cryp, CRYP_CR, cfg);
1476
1477        /* g) XOR and write padded data */
1478        for (i = 0; i < ARRAY_SIZE(tmp); i++) {
1479                tmp[i] ^= cstmp1[i];
1480                tmp[i] ^= cstmp2[i];
1481                stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
1482        }
1483
1484        /* h) wait for completion */
1485        err = stm32_cryp_wait_busy(cryp);
1486        if (err)
1487                dev_err(cryp->dev, "Timeout (wite ccm padded data)\n");
1488
1489        /* i) run the he normal Final phase */
1490        stm32_cryp_finish_req(cryp, err);
1491}
1492
1493static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
1494{
1495        if (unlikely(!cryp->total_in)) {
1496                dev_warn(cryp->dev, "No more data to process\n");
1497                return;
1498        }
1499
1500        if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
1501                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
1502                     is_encrypt(cryp))) {
1503                /* Padding for AES GCM encryption */
1504                if (cryp->caps->padding_wa)
1505                        /* Special case 1 */
1506                        return stm32_cryp_irq_write_gcm_padded_data(cryp);
1507
1508                /* Setting padding bytes (NBBLB) */
1509                stm32_cryp_irq_set_npblb(cryp);
1510        }
1511
1512        if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
1513                     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
1514                     is_decrypt(cryp))) {
1515                /* Padding for AES CCM decryption */
1516                if (cryp->caps->padding_wa)
1517                        /* Special case 2 */
1518                        return stm32_cryp_irq_write_ccm_padded_data(cryp);
1519
1520                /* Setting padding bytes (NBBLB) */
1521                stm32_cryp_irq_set_npblb(cryp);
1522        }
1523
1524        if (is_aes(cryp) && is_ctr(cryp))
1525                stm32_cryp_check_ctr_counter(cryp);
1526
1527        stm32_cryp_irq_write_block(cryp);
1528}
1529
static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}

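/*
 * Write the CCM additional authenticated data (header) into the input FIFO.
 * Per the CCM specification (RFC 3610), the AAD is prefixed with its length:
 * two bytes when the length is at most 65280 (0xFF00) bytes, otherwise the
 * marker 0xFF 0xFE followed by a four-byte big-endian length. That prefix is
 * folded into the first word(s) of block B1 below.
 */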
static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i = 0, j, k;
	u32 alen, cfg, *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;
	alen = cryp->areq->assoclen;

	if (!_walked_in) {
		if (alen <= 65280) {	/* 2^16 - 2^8 */
			/* Write first u32 of B1 */
			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		} else {
			/* Build the first two u32 of B1 */
			d8[0] = 0xFF;
			d8[1] = 0xFE;
			d8[2] = (alen & 0xFF000000) >> 24;
			d8[3] = (alen & 0x00FF0000) >> 16;

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			d8[0] = (alen & 0x0000FF00) >> 8;
			d8[1] = alen & 0x000000FF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		}
	}

	/* Write next u32 */
	for (; i < AES_BLOCK_32; i++) {
		/* Build a u32 */
		memset(d8, 0, sizeof(d8));
		for (k = 0; k < sizeof(u32); k++) {
			d8[k] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
			if ((cryp->total_in_save - cryp->total_in) == alen)
				break;
		}

		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);

		if ((cryp->total_in_save - cryp->total_in) == alen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (ccm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}
	}
}

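/*
 * Interrupt handling is split in two: the hard IRQ handler only snapshots
 * the masked interrupt status (MISR), while this threaded handler does the
 * actual FIFO work: draining the output FIFO, and feeding the input FIFO
 * with header or payload data depending on the current GCM/CCM phase.
 */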
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT) {
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}
	}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

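/*
 * The skcipher algorithms below are consumed through the generic kernel
 * crypto API rather than called directly. A minimal usage sketch (not part
 * of this driver; error handling elided):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The crypto core selects this driver's "stm32-cbc-aes" implementation when
 * its priority (200) wins over other registered "cbc(aes)" providers.
 */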
static struct skcipher_alg crypto_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "stm32-ecb-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ecb_encrypt,
	.decrypt		= stm32_cryp_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "stm32-cbc-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_cbc_encrypt,
	.decrypt		= stm32_cryp_aes_cbc_decrypt,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "stm32-ctr-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ctr_encrypt,
	.decrypt		= stm32_cryp_aes_ctr_decrypt,
},
{
	.base.cra_name		= "ecb(des)",
	.base.cra_driver_name	= "stm32-ecb-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_KEY_SIZE,
	.max_keysize		= DES_KEY_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_ecb_encrypt,
	.decrypt		= stm32_cryp_des_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des)",
	.base.cra_driver_name	= "stm32-cbc-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_KEY_SIZE,
	.max_keysize		= DES_KEY_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_cbc_encrypt,
	.decrypt		= stm32_cryp_des_cbc_decrypt,
},
{
	.base.cra_name		= "ecb(des3_ede)",
	.base.cra_driver_name	= "stm32-ecb-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES3_EDE_KEY_SIZE,
	.max_keysize		= DES3_EDE_KEY_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_ecb_encrypt,
	.decrypt		= stm32_cryp_tdes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des3_ede)",
	.base.cra_driver_name	= "stm32-cbc-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES3_EDE_KEY_SIZE,
	.max_keysize		= DES3_EDE_KEY_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_cbc_encrypt,
	.decrypt		= stm32_cryp_tdes_cbc_decrypt,
},
};

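/*
 * Likewise, the AEAD algorithms below are reached via the AEAD API. A
 * minimal sketch (illustrative only, error handling elided):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_256);
 *	crypto_aead_setauthsize(tfm, 16);
 *	...
 *	crypto_free_aead(tfm);
 */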
static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};

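/*
 * Per-SoC capabilities: the F7 series relies on the software workaround
 * paths above (padding_wa) and on final-block byte swapping (swap_final),
 * while the MP1 hardware handles these cases natively.
 */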
static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

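/*
 * Example device tree node (illustrative sketch only; the address,
 * interrupt, clock and reset specifiers depend on the actual SoC dtsi):
 *
 *	cryp1: cryp@54001000 {
 *		compatible = "st,stm32mp1-cryp";
 *		reg = <0x54001000 0x400>;
 *		interrupts = <GIC_SPI 79 IRQ_TYPE_LEVEL_HIGH>;
 *		clocks = <&rcc CRYP1>;
 *		resets = <&rcc CRYP1_R>;
 *	};
 */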
static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

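	/*
	 * Runtime PM bring-up: take a usage reference and mark the device
	 * active so it stays powered for the rest of probe; the matching
	 * pm_runtime_put_sync() at the end of probe then allows autosuspend.
	 */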
	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, true);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_get_sync(cryp->dev);
	if (ret < 0) {
		pm_runtime_put_noidle(cryp->dev);
		return ret;
	}

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

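/*
 * Runtime PM: suspending the device reduces to gating its clock, and
 * resuming to re-enabling it.
 */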
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");