linux/drivers/crypto/inside-secure/safexcel_hash.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
        struct safexcel_context base;

        u32 alg;
        u8  key_sz;
        bool cbcmac;
        bool do_fallback;
        bool fb_init_done;
        bool fb_do_setkey;

        struct crypto_cipher *kaes;
        struct crypto_ahash *fback;
        struct crypto_shash *shpre;
        struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;
        bool hmac_zlen;
        bool len_is_le;
        bool not_first;
        bool xcbcmac;

        int nents;
        dma_addr_t result_dma;

        u32 digest;

        u8 state_sz;    /* expected state size, only set once */
        u8 block_sz;    /* block size, only set once */
        u8 digest_sz;   /* output digest size, only set once */
        __le32 state[SHA3_512_BLOCK_SIZE /
                     sizeof(__le32)] __aligned(sizeof(__le32));

        u64 len;
        u64 processed;

        u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        return req->len - req->processed;
}
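
/*
 * Note: req->len counts every byte accepted by update() so far, while
 * req->processed counts the bytes already handed to the engine; the
 * difference is exactly what still sits inside the driver (cached data
 * plus any not-yet-sent request data).
 */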

static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length,
                                bool cbcmac)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        input_length &= 15;
        if (unlikely(cbcmac && input_length)) {
                token[0].stat =  0;
                token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
                token[1].packet_length = 16 - input_length;
                token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
                token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
        } else {
                token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
                eip197_noop_token(&token[1]);
        }

        token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[2].packet_length = result_length;
        token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

        eip197_noop_token(&token[3]);
}
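
/*
 * Note on the token above: for CBCMAC the engine must see whole AES
 * blocks, so when input_length is not a multiple of 16 the second
 * instruction zero-pads the data stream. E.g. for input_length = 20,
 * 20 & 15 = 4, so 16 - 4 = 12 zero bytes are inserted before the
 * digest is emitted by the third instruction.
 */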

static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        u64 count = 0;

        cdesc->control_data.control0 = ctx->alg;
        cdesc->control_data.control1 = 0;

        /*
         * Copy the input digest if needed, and setup the context
         * fields. Do this now as we need it to setup the first command
         * descriptor.
         */
        if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
                if (req->xcbcmac)
                        memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
                else
                        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

                if (!req->finish && req->xcbcmac)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                return;
        } else if (!req->processed) {
                /* First - and possibly only - block of basic hash only */
                if (req->finish)
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_RESTART_HASH  |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                else
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_RESTART_HASH   |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                return;
        }

        /* Hash continuation or HMAC, setup (inner) digest from state */
        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

        if (req->finish) {
                /* Compute digest count for hash/HMAC finish operations */
                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    req->hmac_zlen || (req->processed != req->block_sz)) {
                        count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

                        /* This is a hardware limitation, as the
                         * counter must fit into a u32. This represents
                         * a fairly big amount of input data, so we
                         * shouldn't see this.
                         */
                        if (unlikely(count & 0xffffffff00000000ULL)) {
                                dev_warn(priv->dev,
                                         "Input data is too big\n");
                                return;
                        }
                }

                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    /* Special case: zero length HMAC */
                    req->hmac_zlen ||
                    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
                    (req->processed != req->block_sz)) {
                        /* Basic hash continue operation, need digest + cnt */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
                        /* For zero-len HMAC, don't finalize, already padded! */
                        if (req->hmac_zlen)
                                cdesc->control_data.control0 |=
                                        CONTEXT_CONTROL_NO_FINISH_HASH;
                        cdesc->control_data.control1 |=
                                CONTEXT_CONTROL_DIGEST_CNT;
                        ctx->base.ctxr->data[req->state_sz >> 2] =
                                cpu_to_le32(count);
                        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

                        /* Clear zero-length HMAC flag for next operation! */
                        req->hmac_zlen = false;
                } else { /* HMAC */
                        /* Need outer digest for HMAC finalization */
                        memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
                               &ctx->base.opad, req->state_sz);

                        /* Single pass HMAC - no digest count */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_HMAC;
                }
        } else { /* Hash continuation, do not finish yet */
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
                        CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
                        CONTEXT_CONTROL_TYPE_HASH_OUT |
                        CONTEXT_CONTROL_NO_FINISH_HASH;
        }
}
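
/*
 * Note: the engine keeps the digest block count in a 32-bit counter,
 * hence the overflow check on processed / EIP197_COUNTER_BLOCK_SIZE
 * above. Assuming the usual 64-byte counter block, the check only
 * fires past 2^32 * 64 bytes (256 GiB) of input.
 */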

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
                sreq->cache_sz = 0;
        }

        if (sreq->finish) {
                if (sreq->hmac &&
                    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
                        /* Faking HMAC using hash - need to do outer hash */
                        memcpy(sreq->cache, sreq->state,
                               crypto_ahash_digestsize(ahash));

                        memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

                        sreq->len = sreq->block_sz +
                                    crypto_ahash_digestsize(ahash);
                        sreq->processed = sreq->block_sz;
                        sreq->hmac = 0;

                        if (priv->flags & EIP197_TRC_CACHE)
                                ctx->base.needs_inv = true;
                        areq->nbytes = 0;
                        safexcel_ahash_enqueue(areq);

                        *should_complete = false; /* Not done yet */
                        return 1;
                }

                if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                             ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
                        /* Undo final XOR with 0xffffffff ... */
                        *(__le32 *)areq->result = ~sreq->state[0];
                } else {
                        memcpy(areq->result, sreq->state,
                               crypto_ahash_digestsize(ahash));
                }
        }

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}
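
/*
 * Note: the "faking HMAC" path above implements HMAC as two plain
 * hashes for engines that cannot continue an HMAC: the finished inner
 * digest becomes the message data in the cache, the outer (opad) state
 * is loaded as the new starting digest, and the request is re-enqueued
 * for the outer hash before the caller is completed.
 */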

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        struct safexcel_token *dmmy;
        int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
        u64 queued, len;

        queued = safexcel_queued_len(req);
        if (queued <= HASH_CACHE_SIZE)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->finish && !req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full cache blocks, cache it for the next send call.
                 */
                extra = queued & (HASH_CACHE_SIZE - 1);

                /* If this is not the last request and the queued data
                 * is a multiple of a block, cache the last one for now.
                 */
                if (!extra)
                        extra = HASH_CACHE_SIZE;

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra,
                                   areq->nbytes - extra);

                queued -= extra;

                if (!queued) {
                        *commands = 0;
                        *results = 0;
                        return 0;
                }

                extra = 0;
        }

        if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
                if (unlikely(cache_len < AES_BLOCK_SIZE)) {
                        /*
                         * Cache contains less than 1 full block; complete it.
                         */
                        extra = AES_BLOCK_SIZE - cache_len;
                        if (queued > cache_len) {
                                /* More data follows: borrow bytes */
                                u64 tmp = queued - cache_len;

                                skip = min_t(u64, tmp, extra);
                                sg_pcopy_to_buffer(areq->src,
                                        sg_nents(areq->src),
                                        req->cache + cache_len,
                                        skip, 0);
                        }
                        extra -= skip;
                        memset(req->cache + cache_len + skip, 0, extra);
                        if (!ctx->cbcmac && extra) {
                                // 10- padding for XCBCMAC & CMAC
                                req->cache[cache_len + skip] = 0x80;
                                // HW will use K2 instead of K3 - compensate!
                                for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
                                        u32 *cache = (void *)req->cache;
                                        u32 *ipad = ctx->base.ipad.word;
                                        u32 x;

                                        x = ipad[i] ^ ipad[i + 4];
                                        cache[i] ^= swab(x);
                                }
                        }
                        cache_len = AES_BLOCK_SIZE;
                        queued = queued + extra;
                }

                /* XCBC continue: XOR previous result into 1st word */
                crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
        }

        len = queued;
        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len,
                                                 len, ctx->base.ctxr_dma,
                                                 &dmmy);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src,
                                sg_nents_for_len(areq->src,
                                                 areq->nbytes),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                if (unlikely(sglen <= skip)) {
                        skip -= sglen;
                        continue;
                }

                /* Do not overflow the request */
                if ((queued + skip) <= sglen)
                        sglen = queued;
                else
                        sglen -= skip;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen),
                                           sg_dma_address(sg) + skip, sglen,
                                           len, ctx->base.ctxr_dma, &dmmy);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;
                n_cdesc++;

                queued -= sglen;
                if (!queued)
                        break;
                skip = 0;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

        req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->digest_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed += len - extra;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        if (req->nents) {
                dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
                req->nents = 0;
        }
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_dma = 0;
                req->cache_sz = 0;
        }

        return ret;
}
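
/*
 * Note on the non-final path above: queued & (HASH_CACHE_SIZE - 1) is
 * the remainder modulo the cache block size (HASH_CACHE_SIZE is a power
 * of two). That remainder - or one full block, when the data is exactly
 * block-aligned - is held back in cache_next, presumably so a later
 * final() or finup() always has data left to finish on.
 */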

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}
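
/*
 * Note: after an invalidation that is not part of teardown, the request
 * is re-enqueued on a freshly selected ring; -EINPROGRESS from
 * crypto_enqueue_request() is the expected outcome here, anything else
 * is propagated as the result.
 */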

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->base.priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, EIP197_AHASH_REQ_SIZE);

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block size worth of data in
 * the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        u64 cache_len;

        /* cache_len: everything accepted by the driver but not sent yet, i.e.
         * the total size handled by update() minus the last request size and
         * the total size already handled by send()
         */
        cache_len = safexcel_queued_len(req);

        /*
         * If there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return 0;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}
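
/*
 * Note: a 0 return means everything fitted in the cache; e.g. with 40
 * bytes already cached, a 16-byte update is simply appended at offset 40
 * (assuming a 128-byte HASH_CACHE_SIZE, which is not exceeded here).
 * -E2BIG is not an error: it tells update() that the data has to go to
 * the engine instead.
 */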

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                     /* invalidate for *any* non-XCBC continuation */
                   ((req->not_first && !req->xcbcmac) ||
                     /* invalidate if (i)digest changed */
                     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
                     /* invalidate for HMAC finish with odigest changed */
                     (req->finish && req->hmac &&
                      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
                             &ctx->base.opad, req->state_sz))))
                        /*
                         * We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = true;

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }
        req->not_first = true;

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        /* Add request to the cache if it fits */
        ret = safexcel_ahash_cache(areq);

        /* Update total request length */
        req->len += areq->nbytes;

        /* If not all data could fit into the cache, go process the excess.
         * Also go process immediately for an HMAC IV precompute, which
         * will never be finished at all, but needs to be processed anyway.
         */
        if ((ret && !req->finish) || req->last_req)
                return safexcel_ahash_enqueue(areq);

        return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->finish = true;

        if (unlikely(!req->len && !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *hash* request:
                 * The HW cannot do 0 length hash, so we provide the correct
                 * result directly here.
                 */
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
                        memcpy(areq->result,
                               EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
                }

                return 0;
        } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                            ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
                            req->len == sizeof(u32) && !areq->nbytes)) {
                /* Zero length CRC32 */
                memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
                return 0;
        } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length CBC MAC */
                memset(areq->result, 0, AES_BLOCK_SIZE);
                return 0;
        } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length (X)CBC/CMAC */
                int i;

                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
                        u32 *result = (void *)areq->result;

                        /* K3 */
                        result[i] = swab(ctx->base.ipad.word[i + 4]);
                }
                areq->result[0] ^= 0x80;                        // 10- padding
                crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                return 0;
        } else if (unlikely(req->hmac &&
                            (req->len == req->block_sz) &&
                            !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *HMAC* request:
                 * For HMAC, we need to finalize the inner digest
                 * and then perform the outer hash.
                 */

                /* generate pad block in the cache */
                /* start with a hash block of all zeroes */
                memset(req->cache, 0, req->block_sz);
                /* set the first byte to 0x80 to 'append a 1 bit' */
                req->cache[0] = 0x80;
                /* encode the length in bits in the two non-zero length bytes */
                if (req->len_is_le) {
                        /* Little endian length word (e.g. MD5) */
                        req->cache[req->block_sz-8] = (req->block_sz << 3) &
                                                      255;
                        req->cache[req->block_sz-7] = (req->block_sz >> 5);
                } else {
                        /* Big endian length word (e.g. any SHA) */
                        req->cache[req->block_sz-2] = (req->block_sz >> 5);
                        req->cache[req->block_sz-1] = (req->block_sz << 3) &
                                                      255;
                }

                req->len += req->block_sz; /* plus 1 hash block */

                /* Set special zero-length HMAC flag */
                req->hmac_zlen = true;

                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        } else if (req->hmac) {
                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        }

        return safexcel_ahash_enqueue(areq);
}
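
/*
 * Worked example for the zero-length HMAC pad above, assuming SHA-1
 * (block_sz = 64, big-endian length): the cache holds 0x80 followed by
 * zeroes, with the bit length of the already-hashed key^ipad block
 * (64 << 3 = 512 = 0x0200) in the last two bytes: cache[62] = 0x02 and
 * cache[63] = 0x00.
 */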

static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;

        export->len = req->len;
        export->processed = req->processed;

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

        return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len = export->len;
        req->processed = export->processed;

        req->digest = export->digest;

        memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}
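
/*
 * Note: import() calls crypto_ahash_init() first, so the per-request
 * fields (alg, state_sz, block_sz) are re-established by the algorithm's
 * own init routine before the exported lengths, digest type, cache and
 * state are copied back in.
 */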

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->base.priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;
        ctx->fb_do_setkey = false;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len        = SHA1_BLOCK_SIZE;
        req->processed  = SHA1_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
        struct completion completion;
        int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                kfree_sensitive(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}
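
/*
 * Note: this is the standard HMAC key preprocessing from RFC 2104:
 * keys longer than the block size are replaced by their digest, the key
 * is zero-padded to a full block, and the two pads are derived by
 * XORing with HMAC_IPAD_VALUE (0x36) and HMAC_OPAD_VALUE (0x5c).
 */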

static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}
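
/*
 * Note: hashing exactly one block of key ^ pad and exporting the
 * intermediate state yields the precomputed inner/outer digest that the
 * engine later reloads through the context record. Setting last_req
 * forces update() to push the block to the hardware even though this
 * request is never finalized.
 */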

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
                                  unsigned int keylen,
                                  void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}
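
/*
 * Note: the precomputes above deliberately run on this driver's own
 * hash implementations (e.g. "safexcel-sha1", see the setkey wrappers
 * below), so the exported state is already in the exact format the
 * engine expects to reload.
 */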
1190
1191int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1192                         unsigned int keylen, const char *alg,
1193                         unsigned int state_sz)
1194{
1195        struct safexcel_crypto_priv *priv = base->priv;
1196        struct safexcel_ahash_export_state istate, ostate;
1197        int ret;
1198
1199        ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1200        if (ret)
1201                return ret;
1202
1203        if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1204            (memcmp(&base->ipad, istate.state, state_sz) ||
1205             memcmp(&base->opad, ostate.state, state_sz)))
1206                base->needs_inv = true;
1207
1208        memcpy(&base->ipad, &istate.state, state_sz);
1209        memcpy(&base->opad, &ostate.state, state_sz);
1210
1211        return 0;
1212}
1213
1214static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1215                                    unsigned int keylen, const char *alg,
1216                                    unsigned int state_sz)
1217{
1218        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1219
1220        return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1221}
1222
1223static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1224                                     unsigned int keylen)
1225{
1226        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1227                                        SHA1_DIGEST_SIZE);
1228}
1229
1230struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1231        .type = SAFEXCEL_ALG_TYPE_AHASH,
1232        .algo_mask = SAFEXCEL_ALG_SHA1,
1233        .alg.ahash = {
1234                .init = safexcel_hmac_sha1_init,
1235                .update = safexcel_ahash_update,
1236                .final = safexcel_ahash_final,
1237                .finup = safexcel_ahash_finup,
1238                .digest = safexcel_hmac_sha1_digest,
1239                .setkey = safexcel_hmac_sha1_setkey,
1240                .export = safexcel_ahash_export,
1241                .import = safexcel_ahash_import,
1242                .halg = {
1243                        .digestsize = SHA1_DIGEST_SIZE,
1244                        .statesize = sizeof(struct safexcel_ahash_export_state),
1245                        .base = {
1246                                .cra_name = "hmac(sha1)",
1247                                .cra_driver_name = "safexcel-hmac-sha1",
1248                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1249                                .cra_flags = CRYPTO_ALG_ASYNC |
1250                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1251                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1252                                .cra_blocksize = SHA1_BLOCK_SIZE,
1253                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1254                                .cra_init = safexcel_ahash_cra_init,
1255                                .cra_exit = safexcel_ahash_cra_exit,
1256                                .cra_module = THIS_MODULE,
1257                        },
1258                },
1259        },
1260};
1261
1262static int safexcel_sha256_init(struct ahash_request *areq)
1263{
1264        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1265        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1266
1267        memset(req, 0, sizeof(*req));
1268
1269        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1270        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1271        req->state_sz = SHA256_DIGEST_SIZE;
1272        req->digest_sz = SHA256_DIGEST_SIZE;
1273        req->block_sz = SHA256_BLOCK_SIZE;
1274
1275        return 0;
1276}
1277
1278static int safexcel_sha256_digest(struct ahash_request *areq)
1279{
1280        int ret = safexcel_sha256_init(areq);
1281
1282        if (ret)
1283                return ret;
1284
1285        return safexcel_ahash_finup(areq);
1286}
1287
1288struct safexcel_alg_template safexcel_alg_sha256 = {
1289        .type = SAFEXCEL_ALG_TYPE_AHASH,
1290        .algo_mask = SAFEXCEL_ALG_SHA2_256,
1291        .alg.ahash = {
1292                .init = safexcel_sha256_init,
1293                .update = safexcel_ahash_update,
1294                .final = safexcel_ahash_final,
1295                .finup = safexcel_ahash_finup,
1296                .digest = safexcel_sha256_digest,
1297                .export = safexcel_ahash_export,
1298                .import = safexcel_ahash_import,
1299                .halg = {
1300                        .digestsize = SHA256_DIGEST_SIZE,
1301                        .statesize = sizeof(struct safexcel_ahash_export_state),
1302                        .base = {
1303                                .cra_name = "sha256",
1304                                .cra_driver_name = "safexcel-sha256",
1305                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1306                                .cra_flags = CRYPTO_ALG_ASYNC |
1307                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1308                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1309                                .cra_blocksize = SHA256_BLOCK_SIZE,
1310                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1311                                .cra_init = safexcel_ahash_cra_init,
1312                                .cra_exit = safexcel_ahash_cra_exit,
1313                                .cra_module = THIS_MODULE,
1314                        },
1315                },
1316        },
1317};
1318
1319static int safexcel_sha224_init(struct ahash_request *areq)
1320{
1321        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1322        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1323
1324        memset(req, 0, sizeof(*req));
1325
1326        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1327        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1328        req->state_sz = SHA256_DIGEST_SIZE;
1329        req->digest_sz = SHA256_DIGEST_SIZE;
1330        req->block_sz = SHA256_BLOCK_SIZE;
1331
1332        return 0;
1333}
1334
1335static int safexcel_sha224_digest(struct ahash_request *areq)
1336{
1337        int ret = safexcel_sha224_init(areq);
1338
1339        if (ret)
1340                return ret;
1341
1342        return safexcel_ahash_finup(areq);
1343}
1344
1345struct safexcel_alg_template safexcel_alg_sha224 = {
1346        .type = SAFEXCEL_ALG_TYPE_AHASH,
1347        .algo_mask = SAFEXCEL_ALG_SHA2_256,
1348        .alg.ahash = {
1349                .init = safexcel_sha224_init,
1350                .update = safexcel_ahash_update,
1351                .final = safexcel_ahash_final,
1352                .finup = safexcel_ahash_finup,
1353                .digest = safexcel_sha224_digest,
1354                .export = safexcel_ahash_export,
1355                .import = safexcel_ahash_import,
1356                .halg = {
1357                        .digestsize = SHA224_DIGEST_SIZE,
1358                        .statesize = sizeof(struct safexcel_ahash_export_state),
1359                        .base = {
1360                                .cra_name = "sha224",
1361                                .cra_driver_name = "safexcel-sha224",
1362                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1363                                .cra_flags = CRYPTO_ALG_ASYNC |
1364                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1365                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1366                                .cra_blocksize = SHA224_BLOCK_SIZE,
1367                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1368                                .cra_init = safexcel_ahash_cra_init,
1369                                .cra_exit = safexcel_ahash_cra_exit,
1370                                .cra_module = THIS_MODULE,
1371                        },
1372                },
1373        },
1374};
1375
1376static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1377                                       unsigned int keylen)
1378{
1379        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1380                                        SHA256_DIGEST_SIZE);
1381}
1382
1383static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1384{
1385        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1386        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1387
1388        memset(req, 0, sizeof(*req));
1389
1390        /* Start from ipad precompute */
1391        memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1392        /* Already processed the key^ipad part now! */
1393        req->len        = SHA256_BLOCK_SIZE;
1394        req->processed  = SHA256_BLOCK_SIZE;
1395
1396        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1397        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1398        req->state_sz = SHA256_DIGEST_SIZE;
1399        req->digest_sz = SHA256_DIGEST_SIZE;
1400        req->block_sz = SHA256_BLOCK_SIZE;
1401        req->hmac = true;
1402
1403        return 0;
1404}
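
/*
 * HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)). The setkey path
 * (safexcel_hmac_alg_setkey) hashes the K ^ ipad and K ^ opad blocks once
 * and caches the resulting intermediate digests in ctx->base.ipad and
 * ctx->base.opad. These hmac init functions then merely load the cached
 * inner state and account for the one block already consumed, which is
 * why req->len and req->processed start at the block size instead of 0;
 * the outer transform is applied by the engine when the request finishes.
 */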
1405
1406static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1407{
1408        int ret = safexcel_hmac_sha224_init(areq);
1409
1410        if (ret)
1411                return ret;
1412
1413        return safexcel_ahash_finup(areq);
1414}
1415
1416struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1417        .type = SAFEXCEL_ALG_TYPE_AHASH,
1418        .algo_mask = SAFEXCEL_ALG_SHA2_256,
1419        .alg.ahash = {
1420                .init = safexcel_hmac_sha224_init,
1421                .update = safexcel_ahash_update,
1422                .final = safexcel_ahash_final,
1423                .finup = safexcel_ahash_finup,
1424                .digest = safexcel_hmac_sha224_digest,
1425                .setkey = safexcel_hmac_sha224_setkey,
1426                .export = safexcel_ahash_export,
1427                .import = safexcel_ahash_import,
1428                .halg = {
1429                        .digestsize = SHA224_DIGEST_SIZE,
1430                        .statesize = sizeof(struct safexcel_ahash_export_state),
1431                        .base = {
1432                                .cra_name = "hmac(sha224)",
1433                                .cra_driver_name = "safexcel-hmac-sha224",
1434                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1435                                .cra_flags = CRYPTO_ALG_ASYNC |
1436                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1437                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1438                                .cra_blocksize = SHA224_BLOCK_SIZE,
1439                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1440                                .cra_init = safexcel_ahash_cra_init,
1441                                .cra_exit = safexcel_ahash_cra_exit,
1442                                .cra_module = THIS_MODULE,
1443                        },
1444                },
1445        },
1446};
1447
1448static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1449                                       unsigned int keylen)
1450{
1451        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1452                                        SHA256_DIGEST_SIZE);
1453}
1454
1455static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1456{
1457        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1458        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1459
1460        memset(req, 0, sizeof(*req));
1461
1462        /* Start from ipad precompute */
1463        memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1464        /* Already processed the key^ipad part now! */
1465        req->len        = SHA256_BLOCK_SIZE;
1466        req->processed  = SHA256_BLOCK_SIZE;
1467
1468        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1469        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1470        req->state_sz = SHA256_DIGEST_SIZE;
1471        req->digest_sz = SHA256_DIGEST_SIZE;
1472        req->block_sz = SHA256_BLOCK_SIZE;
1473        req->hmac = true;
1474
1475        return 0;
1476}
1477
1478static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1479{
1480        int ret = safexcel_hmac_sha256_init(areq);
1481
1482        if (ret)
1483                return ret;
1484
1485        return safexcel_ahash_finup(areq);
1486}
1487
1488struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1489        .type = SAFEXCEL_ALG_TYPE_AHASH,
1490        .algo_mask = SAFEXCEL_ALG_SHA2_256,
1491        .alg.ahash = {
1492                .init = safexcel_hmac_sha256_init,
1493                .update = safexcel_ahash_update,
1494                .final = safexcel_ahash_final,
1495                .finup = safexcel_ahash_finup,
1496                .digest = safexcel_hmac_sha256_digest,
1497                .setkey = safexcel_hmac_sha256_setkey,
1498                .export = safexcel_ahash_export,
1499                .import = safexcel_ahash_import,
1500                .halg = {
1501                        .digestsize = SHA256_DIGEST_SIZE,
1502                        .statesize = sizeof(struct safexcel_ahash_export_state),
1503                        .base = {
1504                                .cra_name = "hmac(sha256)",
1505                                .cra_driver_name = "safexcel-hmac-sha256",
1506                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1507                                .cra_flags = CRYPTO_ALG_ASYNC |
1508                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1509                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1510                                .cra_blocksize = SHA256_BLOCK_SIZE,
1511                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1512                                .cra_init = safexcel_ahash_cra_init,
1513                                .cra_exit = safexcel_ahash_cra_exit,
1514                                .cra_module = THIS_MODULE,
1515                        },
1516                },
1517        },
1518};
1519
1520static int safexcel_sha512_init(struct ahash_request *areq)
1521{
1522        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1523        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1524
1525        memset(req, 0, sizeof(*req));
1526
1527        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1528        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1529        req->state_sz = SHA512_DIGEST_SIZE;
1530        req->digest_sz = SHA512_DIGEST_SIZE;
1531        req->block_sz = SHA512_BLOCK_SIZE;
1532
1533        return 0;
1534}
1535
1536static int safexcel_sha512_digest(struct ahash_request *areq)
1537{
1538        int ret = safexcel_sha512_init(areq);
1539
1540        if (ret)
1541                return ret;
1542
1543        return safexcel_ahash_finup(areq);
1544}
1545
1546struct safexcel_alg_template safexcel_alg_sha512 = {
1547        .type = SAFEXCEL_ALG_TYPE_AHASH,
1548        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1549        .alg.ahash = {
1550                .init = safexcel_sha512_init,
1551                .update = safexcel_ahash_update,
1552                .final = safexcel_ahash_final,
1553                .finup = safexcel_ahash_finup,
1554                .digest = safexcel_sha512_digest,
1555                .export = safexcel_ahash_export,
1556                .import = safexcel_ahash_import,
1557                .halg = {
1558                        .digestsize = SHA512_DIGEST_SIZE,
1559                        .statesize = sizeof(struct safexcel_ahash_export_state),
1560                        .base = {
1561                                .cra_name = "sha512",
1562                                .cra_driver_name = "safexcel-sha512",
1563                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1564                                .cra_flags = CRYPTO_ALG_ASYNC |
1565                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1566                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1567                                .cra_blocksize = SHA512_BLOCK_SIZE,
1568                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1569                                .cra_init = safexcel_ahash_cra_init,
1570                                .cra_exit = safexcel_ahash_cra_exit,
1571                                .cra_module = THIS_MODULE,
1572                        },
1573                },
1574        },
1575};
1576
1577static int safexcel_sha384_init(struct ahash_request *areq)
1578{
1579        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1580        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1581
1582        memset(req, 0, sizeof(*req));
1583
1584        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1585        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1586        req->state_sz = SHA512_DIGEST_SIZE;
1587        req->digest_sz = SHA512_DIGEST_SIZE;
1588        req->block_sz = SHA512_BLOCK_SIZE;
1589
1590        return 0;
1591}
1592
1593static int safexcel_sha384_digest(struct ahash_request *areq)
1594{
1595        int ret = safexcel_sha384_init(areq);
1596
1597        if (ret)
1598                return ret;
1599
1600        return safexcel_ahash_finup(areq);
1601}
1602
1603struct safexcel_alg_template safexcel_alg_sha384 = {
1604        .type = SAFEXCEL_ALG_TYPE_AHASH,
1605        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1606        .alg.ahash = {
1607                .init = safexcel_sha384_init,
1608                .update = safexcel_ahash_update,
1609                .final = safexcel_ahash_final,
1610                .finup = safexcel_ahash_finup,
1611                .digest = safexcel_sha384_digest,
1612                .export = safexcel_ahash_export,
1613                .import = safexcel_ahash_import,
1614                .halg = {
1615                        .digestsize = SHA384_DIGEST_SIZE,
1616                        .statesize = sizeof(struct safexcel_ahash_export_state),
1617                        .base = {
1618                                .cra_name = "sha384",
1619                                .cra_driver_name = "safexcel-sha384",
1620                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1621                                .cra_flags = CRYPTO_ALG_ASYNC |
1622                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1623                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1624                                .cra_blocksize = SHA384_BLOCK_SIZE,
1625                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1626                                .cra_init = safexcel_ahash_cra_init,
1627                                .cra_exit = safexcel_ahash_cra_exit,
1628                                .cra_module = THIS_MODULE,
1629                        },
1630                },
1631        },
1632};
1633
1634static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1635                                       unsigned int keylen)
1636{
1637        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1638                                        SHA512_DIGEST_SIZE);
1639}
1640
1641static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1642{
1643        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1644        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1645
1646        memset(req, 0, sizeof(*req));
1647
1648        /* Start from ipad precompute */
1649        memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1650        /* Already processed the key^ipad part now! */
1651        req->len        = SHA512_BLOCK_SIZE;
1652        req->processed  = SHA512_BLOCK_SIZE;
1653
1654        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1655        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1656        req->state_sz = SHA512_DIGEST_SIZE;
1657        req->digest_sz = SHA512_DIGEST_SIZE;
1658        req->block_sz = SHA512_BLOCK_SIZE;
1659        req->hmac = true;
1660
1661        return 0;
1662}
1663
1664static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1665{
1666        int ret = safexcel_hmac_sha512_init(areq);
1667
1668        if (ret)
1669                return ret;
1670
1671        return safexcel_ahash_finup(areq);
1672}
1673
1674struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1675        .type = SAFEXCEL_ALG_TYPE_AHASH,
1676        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1677        .alg.ahash = {
1678                .init = safexcel_hmac_sha512_init,
1679                .update = safexcel_ahash_update,
1680                .final = safexcel_ahash_final,
1681                .finup = safexcel_ahash_finup,
1682                .digest = safexcel_hmac_sha512_digest,
1683                .setkey = safexcel_hmac_sha512_setkey,
1684                .export = safexcel_ahash_export,
1685                .import = safexcel_ahash_import,
1686                .halg = {
1687                        .digestsize = SHA512_DIGEST_SIZE,
1688                        .statesize = sizeof(struct safexcel_ahash_export_state),
1689                        .base = {
1690                                .cra_name = "hmac(sha512)",
1691                                .cra_driver_name = "safexcel-hmac-sha512",
1692                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1693                                .cra_flags = CRYPTO_ALG_ASYNC |
1694                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1695                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1696                                .cra_blocksize = SHA512_BLOCK_SIZE,
1697                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1698                                .cra_init = safexcel_ahash_cra_init,
1699                                .cra_exit = safexcel_ahash_cra_exit,
1700                                .cra_module = THIS_MODULE,
1701                        },
1702                },
1703        },
1704};
1705
1706static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1707                                       unsigned int keylen)
1708{
1709        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1710                                        SHA512_DIGEST_SIZE);
1711}
1712
1713static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1714{
1715        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1716        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1717
1718        memset(req, 0, sizeof(*req));
1719
1720        /* Start from ipad precompute */
1721        memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1722        /* Already processed the key^ipad part now! */
1723        req->len        = SHA512_BLOCK_SIZE;
1724        req->processed  = SHA512_BLOCK_SIZE;
1725
1726        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1727        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1728        req->state_sz = SHA512_DIGEST_SIZE;
1729        req->digest_sz = SHA512_DIGEST_SIZE;
1730        req->block_sz = SHA512_BLOCK_SIZE;
1731        req->hmac = true;
1732
1733        return 0;
1734}
1735
1736static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1737{
1738        int ret = safexcel_hmac_sha384_init(areq);
1739
1740        if (ret)
1741                return ret;
1742
1743        return safexcel_ahash_finup(areq);
1744}
1745
1746struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1747        .type = SAFEXCEL_ALG_TYPE_AHASH,
1748        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1749        .alg.ahash = {
1750                .init = safexcel_hmac_sha384_init,
1751                .update = safexcel_ahash_update,
1752                .final = safexcel_ahash_final,
1753                .finup = safexcel_ahash_finup,
1754                .digest = safexcel_hmac_sha384_digest,
1755                .setkey = safexcel_hmac_sha384_setkey,
1756                .export = safexcel_ahash_export,
1757                .import = safexcel_ahash_import,
1758                .halg = {
1759                        .digestsize = SHA384_DIGEST_SIZE,
1760                        .statesize = sizeof(struct safexcel_ahash_export_state),
1761                        .base = {
1762                                .cra_name = "hmac(sha384)",
1763                                .cra_driver_name = "safexcel-hmac-sha384",
1764                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1765                                .cra_flags = CRYPTO_ALG_ASYNC |
1766                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1767                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1768                                .cra_blocksize = SHA384_BLOCK_SIZE,
1769                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1770                                .cra_init = safexcel_ahash_cra_init,
1771                                .cra_exit = safexcel_ahash_cra_exit,
1772                                .cra_module = THIS_MODULE,
1773                        },
1774                },
1775        },
1776};
1777
1778static int safexcel_md5_init(struct ahash_request *areq)
1779{
1780        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1781        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1782
1783        memset(req, 0, sizeof(*req));
1784
1785        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1786        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1787        req->state_sz = MD5_DIGEST_SIZE;
1788        req->digest_sz = MD5_DIGEST_SIZE;
1789        req->block_sz = MD5_HMAC_BLOCK_SIZE;
1790
1791        return 0;
1792}
1793
1794static int safexcel_md5_digest(struct ahash_request *areq)
1795{
1796        int ret = safexcel_md5_init(areq);
1797
1798        if (ret)
1799                return ret;
1800
1801        return safexcel_ahash_finup(areq);
1802}
1803
1804struct safexcel_alg_template safexcel_alg_md5 = {
1805        .type = SAFEXCEL_ALG_TYPE_AHASH,
1806        .algo_mask = SAFEXCEL_ALG_MD5,
1807        .alg.ahash = {
1808                .init = safexcel_md5_init,
1809                .update = safexcel_ahash_update,
1810                .final = safexcel_ahash_final,
1811                .finup = safexcel_ahash_finup,
1812                .digest = safexcel_md5_digest,
1813                .export = safexcel_ahash_export,
1814                .import = safexcel_ahash_import,
1815                .halg = {
1816                        .digestsize = MD5_DIGEST_SIZE,
1817                        .statesize = sizeof(struct safexcel_ahash_export_state),
1818                        .base = {
1819                                .cra_name = "md5",
1820                                .cra_driver_name = "safexcel-md5",
1821                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1822                                .cra_flags = CRYPTO_ALG_ASYNC |
1823                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1824                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1825                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1826                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1827                                .cra_init = safexcel_ahash_cra_init,
1828                                .cra_exit = safexcel_ahash_cra_exit,
1829                                .cra_module = THIS_MODULE,
1830                        },
1831                },
1832        },
1833};
1834
1835static int safexcel_hmac_md5_init(struct ahash_request *areq)
1836{
1837        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1838        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1839
1840        memset(req, 0, sizeof(*req));
1841
1842        /* Start from ipad precompute */
1843        memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1844        /* Already processed the key^ipad part now! */
1845        req->len        = MD5_HMAC_BLOCK_SIZE;
1846        req->processed  = MD5_HMAC_BLOCK_SIZE;
1847
1848        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1849        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1850        req->state_sz = MD5_DIGEST_SIZE;
1851        req->digest_sz = MD5_DIGEST_SIZE;
1852        req->block_sz = MD5_HMAC_BLOCK_SIZE;
1853        req->len_is_le = true; /* MD5 is little endian! ... */
1854        req->hmac = true;
1855
1856        return 0;
1857}
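
/*
 * Unlike the SHA family, MD5 encodes the message bit count in its final
 * padding block as a little-endian quantity; req->len_is_le records that
 * so the length is emitted in the right byte order when the engine pads
 * the HMAC stream.
 */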
1858
1859static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1860                                     unsigned int keylen)
1861{
1862        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1863                                        MD5_DIGEST_SIZE);
1864}
1865
1866static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1867{
1868        int ret = safexcel_hmac_md5_init(areq);
1869
1870        if (ret)
1871                return ret;
1872
1873        return safexcel_ahash_finup(areq);
1874}
1875
1876struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1877        .type = SAFEXCEL_ALG_TYPE_AHASH,
1878        .algo_mask = SAFEXCEL_ALG_MD5,
1879        .alg.ahash = {
1880                .init = safexcel_hmac_md5_init,
1881                .update = safexcel_ahash_update,
1882                .final = safexcel_ahash_final,
1883                .finup = safexcel_ahash_finup,
1884                .digest = safexcel_hmac_md5_digest,
1885                .setkey = safexcel_hmac_md5_setkey,
1886                .export = safexcel_ahash_export,
1887                .import = safexcel_ahash_import,
1888                .halg = {
1889                        .digestsize = MD5_DIGEST_SIZE,
1890                        .statesize = sizeof(struct safexcel_ahash_export_state),
1891                        .base = {
1892                                .cra_name = "hmac(md5)",
1893                                .cra_driver_name = "safexcel-hmac-md5",
1894                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1895                                .cra_flags = CRYPTO_ALG_ASYNC |
1896                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1897                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1898                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1899                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1900                                .cra_init = safexcel_ahash_cra_init,
1901                                .cra_exit = safexcel_ahash_cra_exit,
1902                                .cra_module = THIS_MODULE,
1903                        },
1904                },
1905        },
1906};
1907
1908static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1909{
1910        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1911        int ret = safexcel_ahash_cra_init(tfm);
1912
1913        /* Default 'key' is all zeroes */
1914        memset(&ctx->base.ipad, 0, sizeof(u32));
1915        return ret;
1916}
1917
1918static int safexcel_crc32_init(struct ahash_request *areq)
1919{
1920        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1921        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1922
1923        memset(req, 0, sizeof(*req));
1924
1925        /* Start from loaded key */
1926        req->state[0]   = cpu_to_le32(~ctx->base.ipad.word[0]);
1927        /* Set processed to non-zero to enable invalidation detection */
1928        req->len        = sizeof(u32);
1929        req->processed  = sizeof(u32);
1930
1931        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1932        req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1933        req->state_sz = sizeof(u32);
1934        req->digest_sz = sizeof(u32);
1935        req->block_sz = sizeof(u32);
1936
1937        return 0;
1938}
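
/*
 * The engine computes CRC32 in XCM digest mode on the bit-inverted
 * remainder, as in the classic crc = ~0 software implementation. The
 * optional "key" is the caller's seed value, hence the inversion above:
 * the all-zeroes default from safexcel_crc32_cra_init() becomes the
 * standard 0xffffffff initial remainder, and the result is presumably
 * inverted back on the way out.
 */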
1939
1940static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1941                                 unsigned int keylen)
1942{
1943        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1944
1945        if (keylen != sizeof(u32))
1946                return -EINVAL;
1947
1948        memcpy(&ctx->base.ipad, key, sizeof(u32));
1949        return 0;
1950}
1951
1952static int safexcel_crc32_digest(struct ahash_request *areq)
1953{
1954        return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1955}
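
/*
 * "a ?: b" is the GCC conditional-with-omitted-operand extension: it
 * evaluates to a when a is nonzero and to b otherwise, without evaluating
 * a twice. The digest helpers in this file use it to propagate a nonzero
 * error code from init and otherwise return the finup result.
 */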
1956
1957struct safexcel_alg_template safexcel_alg_crc32 = {
1958        .type = SAFEXCEL_ALG_TYPE_AHASH,
1959        .algo_mask = 0,
1960        .alg.ahash = {
1961                .init = safexcel_crc32_init,
1962                .update = safexcel_ahash_update,
1963                .final = safexcel_ahash_final,
1964                .finup = safexcel_ahash_finup,
1965                .digest = safexcel_crc32_digest,
1966                .setkey = safexcel_crc32_setkey,
1967                .export = safexcel_ahash_export,
1968                .import = safexcel_ahash_import,
1969                .halg = {
1970                        .digestsize = sizeof(u32),
1971                        .statesize = sizeof(struct safexcel_ahash_export_state),
1972                        .base = {
1973                                .cra_name = "crc32",
1974                                .cra_driver_name = "safexcel-crc32",
1975                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1976                                .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1977                                             CRYPTO_ALG_ASYNC |
1978                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1979                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1980                                .cra_blocksize = 1,
1981                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1982                                .cra_init = safexcel_crc32_cra_init,
1983                                .cra_exit = safexcel_ahash_cra_exit,
1984                                .cra_module = THIS_MODULE,
1985                        },
1986                },
1987        },
1988};
1989
1990static int safexcel_cbcmac_init(struct ahash_request *areq)
1991{
1992        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1993        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1994
1995        memset(req, 0, sizeof(*req));
1996
1997        /* Start from loaded keys */
1998        memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
1999        /* Set processed to non-zero to enable invalidation detection */
2000        req->len        = AES_BLOCK_SIZE;
2001        req->processed  = AES_BLOCK_SIZE;
2002
2003        req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
2004        req->state_sz = ctx->key_sz;
2005        req->digest_sz = AES_BLOCK_SIZE;
2006        req->block_sz = AES_BLOCK_SIZE;
2007        req->xcbcmac  = true;
2008
2009        return 0;
2010}
2011
2012static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2013                                  unsigned int len)
2014{
2015        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2016        struct crypto_aes_ctx aes;
2017        int ret, i;
2018
2019        ret = aes_expandkey(&aes, key, len);
2020        if (ret)
2021                return ret;
2022
2023        memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2024        for (i = 0; i < len / sizeof(u32); i++)
2025                ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2026
2027        if (len == AES_KEYSIZE_192) {
2028                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2029                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2030        } else if (len == AES_KEYSIZE_256) {
2031                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2032                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2033        } else {
2034                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2035                ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2036        }
2037        ctx->cbcmac  = true;
2038
2039        memzero_explicit(&aes, sizeof(aes));
2040        return 0;
2041}
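
/*
 * Plain CBC-MAC is apparently driven through the engine's XCBC algorithm
 * variants as a degenerate case: the loaded state begins with two all-zero
 * blocks in the slots where XCBC keeps its K2/K3 whitening subkeys (XORing
 * zero into the final block is a no-op, leaving pure CBC-MAC), followed by
 * the AES key words in the byte order the engine expects. key_sz covers
 * that whole layout, hence the extra 2 * AES_BLOCK_SIZE.
 */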
2042
2043static int safexcel_cbcmac_digest(struct ahash_request *areq)
2044{
2045        return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2046}
2047
2048struct safexcel_alg_template safexcel_alg_cbcmac = {
2049        .type = SAFEXCEL_ALG_TYPE_AHASH,
2050        .algo_mask = 0,
2051        .alg.ahash = {
2052                .init = safexcel_cbcmac_init,
2053                .update = safexcel_ahash_update,
2054                .final = safexcel_ahash_final,
2055                .finup = safexcel_ahash_finup,
2056                .digest = safexcel_cbcmac_digest,
2057                .setkey = safexcel_cbcmac_setkey,
2058                .export = safexcel_ahash_export,
2059                .import = safexcel_ahash_import,
2060                .halg = {
2061                        .digestsize = AES_BLOCK_SIZE,
2062                        .statesize = sizeof(struct safexcel_ahash_export_state),
2063                        .base = {
2064                                .cra_name = "cbcmac(aes)",
2065                                .cra_driver_name = "safexcel-cbcmac-aes",
2066                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2067                                .cra_flags = CRYPTO_ALG_ASYNC |
2068                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2069                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2070                                .cra_blocksize = 1,
2071                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2072                                .cra_init = safexcel_ahash_cra_init,
2073                                .cra_exit = safexcel_ahash_cra_exit,
2074                                .cra_module = THIS_MODULE,
2075                        },
2076                },
2077        },
2078};
2079
2080static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2081                                   unsigned int len)
2082{
2083        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2084        struct crypto_aes_ctx aes;
2085        u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2086        int ret, i;
2087
2088        ret = aes_expandkey(&aes, key, len);
2089        if (ret)
2090                return ret;
2091
2092        /* precompute the XCBC key material */
2093        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2094        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2095                                CRYPTO_TFM_REQ_MASK);
2096        ret = crypto_cipher_setkey(ctx->kaes, key, len);
2097        if (ret)
2098                return ret;
2099
2100        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2101                "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2102        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2103                "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2104        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2105                "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2106        for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2107                ctx->base.ipad.word[i] = swab(key_tmp[i]);
2108
2109        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2110        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2111                                CRYPTO_TFM_REQ_MASK);
2112        ret = crypto_cipher_setkey(ctx->kaes,
2113                                   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2114                                   AES_MIN_KEY_SIZE);
2115        if (ret)
2116                return ret;
2117
2118        ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2119        ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2120        ctx->cbcmac = false;
2121
2122        memzero_explicit(&aes, sizeof(aes));
2123        return 0;
2124}
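
/*
 * AES-XCBC-MAC subkey derivation per RFC 3566: K1 = E(K, 16 x 0x01),
 * K2 = E(K, 16 x 0x02) and K3 = E(K, 16 x 0x03), computed above with the
 * software AES cipher. The context material is laid out as K2 | K3 | K1
 * with each 32-bit word byte-swapped, and ctx->kaes is then re-keyed with
 * K1, presumably so the driver can finish awkward cases (such as empty or
 * partial final blocks) with software block encryptions under the real
 * MAC key.
 */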
2125
2126static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2127{
2128        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2129
2130        safexcel_ahash_cra_init(tfm);
2131        ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2132        return PTR_ERR_OR_ZERO(ctx->kaes);
2133}
2134
2135static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2136{
2137        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2138
2139        crypto_free_cipher(ctx->kaes);
2140        safexcel_ahash_cra_exit(tfm);
2141}
2142
2143struct safexcel_alg_template safexcel_alg_xcbcmac = {
2144        .type = SAFEXCEL_ALG_TYPE_AHASH,
2145        .algo_mask = 0,
2146        .alg.ahash = {
2147                .init = safexcel_cbcmac_init,
2148                .update = safexcel_ahash_update,
2149                .final = safexcel_ahash_final,
2150                .finup = safexcel_ahash_finup,
2151                .digest = safexcel_cbcmac_digest,
2152                .setkey = safexcel_xcbcmac_setkey,
2153                .export = safexcel_ahash_export,
2154                .import = safexcel_ahash_import,
2155                .halg = {
2156                        .digestsize = AES_BLOCK_SIZE,
2157                        .statesize = sizeof(struct safexcel_ahash_export_state),
2158                        .base = {
2159                                .cra_name = "xcbc(aes)",
2160                                .cra_driver_name = "safexcel-xcbc-aes",
2161                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2162                                .cra_flags = CRYPTO_ALG_ASYNC |
2163                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2164                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2165                                .cra_blocksize = AES_BLOCK_SIZE,
2166                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2167                                .cra_init = safexcel_xcbcmac_cra_init,
2168                                .cra_exit = safexcel_xcbcmac_cra_exit,
2169                                .cra_module = THIS_MODULE,
2170                        },
2171                },
2172        },
2173};
2174
2175static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2176                                unsigned int len)
2177{
2178        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2179        struct crypto_aes_ctx aes;
2180        __be64 consts[4];
2181        u64 _const[2];
2182        u8 msb_mask, gfmask;
2183        int ret, i;
2184
2185        ret = aes_expandkey(&aes, key, len);
2186        if (ret)
2187                return ret;
2188
2189        for (i = 0; i < len / sizeof(u32); i++)
2190                ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);
2191
2192        /* precompute the CMAC key material */
2193        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2194        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2195                                CRYPTO_TFM_REQ_MASK);
2196        ret = crypto_cipher_setkey(ctx->kaes, key, len);
2197        if (ret)
2198                return ret;
2199
2200        /* code below borrowed from crypto/cmac.c */
2201        /* encrypt the zero block */
2202        memset(consts, 0, AES_BLOCK_SIZE);
2203        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2204
2205        gfmask = 0x87;
2206        _const[0] = be64_to_cpu(consts[1]);
2207        _const[1] = be64_to_cpu(consts[0]);
2208
2209        /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2210        for (i = 0; i < 4; i += 2) {
2211                msb_mask = ((s64)_const[1] >> 63) & gfmask;
2212                _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2213                _const[0] = (_const[0] << 1) ^ msb_mask;
2214
2215                consts[i + 0] = cpu_to_be64(_const[1]);
2216                consts[i + 1] = cpu_to_be64(_const[0]);
2217        }
2218        /* end of code borrowed from crypto/cmac.c */
2219
2220        for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2221                ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2222
2223        if (len == AES_KEYSIZE_192) {
2224                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2225                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2226        } else if (len == AES_KEYSIZE_256) {
2227                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2228                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2229        } else {
2230                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2231                ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2232        }
2233        ctx->cbcmac = false;
2234
2235        memzero_explicit(&aes, sizeof(aes));
2236        return 0;
2237}
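
/*
 * This is standard CMAC subkey generation, as in crypto/cmac.c: encrypt
 * an all-zero block to get L, then K1 = dbl(L) and K2 = dbl(K1), where
 * dbl() doubles a 128-bit value in GF(2^128) modulo the polynomial
 * x^128 + x^7 + x^2 + x + 1. Each doubling is a left shift by one bit
 * with 0x87 XORed into the low byte whenever the bit shifted out of the
 * top was set, which is exactly what the loop above does across the two
 * 64-bit halves.
 */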
2238
2239struct safexcel_alg_template safexcel_alg_cmac = {
2240        .type = SAFEXCEL_ALG_TYPE_AHASH,
2241        .algo_mask = 0,
2242        .alg.ahash = {
2243                .init = safexcel_cbcmac_init,
2244                .update = safexcel_ahash_update,
2245                .final = safexcel_ahash_final,
2246                .finup = safexcel_ahash_finup,
2247                .digest = safexcel_cbcmac_digest,
2248                .setkey = safexcel_cmac_setkey,
2249                .export = safexcel_ahash_export,
2250                .import = safexcel_ahash_import,
2251                .halg = {
2252                        .digestsize = AES_BLOCK_SIZE,
2253                        .statesize = sizeof(struct safexcel_ahash_export_state),
2254                        .base = {
2255                                .cra_name = "cmac(aes)",
2256                                .cra_driver_name = "safexcel-cmac-aes",
2257                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2258                                .cra_flags = CRYPTO_ALG_ASYNC |
2259                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2260                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2261                                .cra_blocksize = AES_BLOCK_SIZE,
2262                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2263                                .cra_init = safexcel_xcbcmac_cra_init,
2264                                .cra_exit = safexcel_xcbcmac_cra_exit,
2265                                .cra_module = THIS_MODULE,
2266                        },
2267                },
2268        },
2269};
2270
2271static int safexcel_sm3_init(struct ahash_request *areq)
2272{
2273        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2274        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2275
2276        memset(req, 0, sizeof(*req));
2277
2278        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2279        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2280        req->state_sz = SM3_DIGEST_SIZE;
2281        req->digest_sz = SM3_DIGEST_SIZE;
2282        req->block_sz = SM3_BLOCK_SIZE;
2283
2284        return 0;
2285}
2286
2287static int safexcel_sm3_digest(struct ahash_request *areq)
2288{
2289        int ret = safexcel_sm3_init(areq);
2290
2291        if (ret)
2292                return ret;
2293
2294        return safexcel_ahash_finup(areq);
2295}
2296
2297struct safexcel_alg_template safexcel_alg_sm3 = {
2298        .type = SAFEXCEL_ALG_TYPE_AHASH,
2299        .algo_mask = SAFEXCEL_ALG_SM3,
2300        .alg.ahash = {
2301                .init = safexcel_sm3_init,
2302                .update = safexcel_ahash_update,
2303                .final = safexcel_ahash_final,
2304                .finup = safexcel_ahash_finup,
2305                .digest = safexcel_sm3_digest,
2306                .export = safexcel_ahash_export,
2307                .import = safexcel_ahash_import,
2308                .halg = {
2309                        .digestsize = SM3_DIGEST_SIZE,
2310                        .statesize = sizeof(struct safexcel_ahash_export_state),
2311                        .base = {
2312                                .cra_name = "sm3",
2313                                .cra_driver_name = "safexcel-sm3",
2314                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2315                                .cra_flags = CRYPTO_ALG_ASYNC |
2316                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2317                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2318                                .cra_blocksize = SM3_BLOCK_SIZE,
2319                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2320                                .cra_init = safexcel_ahash_cra_init,
2321                                .cra_exit = safexcel_ahash_cra_exit,
2322                                .cra_module = THIS_MODULE,
2323                        },
2324                },
2325        },
2326};
2327
2328static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2329                                    unsigned int keylen)
2330{
2331        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2332                                        SM3_DIGEST_SIZE);
2333}
2334
2335static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2336{
2337        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2338        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2339
2340        memset(req, 0, sizeof(*req));
2341
2342        /* Start from ipad precompute */
2343        memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2344        /* Already processed the key^ipad part now! */
2345        req->len        = SM3_BLOCK_SIZE;
2346        req->processed  = SM3_BLOCK_SIZE;
2347
2348        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2349        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2350        req->state_sz = SM3_DIGEST_SIZE;
2351        req->digest_sz = SM3_DIGEST_SIZE;
2352        req->block_sz = SM3_BLOCK_SIZE;
2353        req->hmac = true;
2354
2355        return 0;
2356}
2357
2358static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2359{
2360        int ret = safexcel_hmac_sm3_init(areq);
2361
2362        if (ret)
2363                return ret;
2364
2365        return safexcel_ahash_finup(areq);
2366}
2367
2368struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2369        .type = SAFEXCEL_ALG_TYPE_AHASH,
2370        .algo_mask = SAFEXCEL_ALG_SM3,
2371        .alg.ahash = {
2372                .init = safexcel_hmac_sm3_init,
2373                .update = safexcel_ahash_update,
2374                .final = safexcel_ahash_final,
2375                .finup = safexcel_ahash_finup,
2376                .digest = safexcel_hmac_sm3_digest,
2377                .setkey = safexcel_hmac_sm3_setkey,
2378                .export = safexcel_ahash_export,
2379                .import = safexcel_ahash_import,
2380                .halg = {
2381                        .digestsize = SM3_DIGEST_SIZE,
2382                        .statesize = sizeof(struct safexcel_ahash_export_state),
2383                        .base = {
2384                                .cra_name = "hmac(sm3)",
2385                                .cra_driver_name = "safexcel-hmac-sm3",
2386                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2387                                .cra_flags = CRYPTO_ALG_ASYNC |
2388                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2389                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2390                                .cra_blocksize = SM3_BLOCK_SIZE,
2391                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2392                                .cra_init = safexcel_ahash_cra_init,
2393                                .cra_exit = safexcel_ahash_cra_exit,
2394                                .cra_module = THIS_MODULE,
2395                        },
2396                },
2397        },
2398};
2399
2400static int safexcel_sha3_224_init(struct ahash_request *areq)
2401{
2402        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2403        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2404        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2405
2406        memset(req, 0, sizeof(*req));
2407
2408        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2409        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2410        req->state_sz = SHA3_224_DIGEST_SIZE;
2411        req->digest_sz = SHA3_224_DIGEST_SIZE;
2412        req->block_sz = SHA3_224_BLOCK_SIZE;
2413        ctx->do_fallback = false;
2414        ctx->fb_init_done = false;
2415        return 0;
2416}
2417
2418static int safexcel_sha3_fbcheck(struct ahash_request *req)
2419{
2420        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2421        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2422        struct ahash_request *subreq = ahash_request_ctx(req);
2423        int ret = 0;
2424
2425        if (ctx->do_fallback) {
2426                ahash_request_set_tfm(subreq, ctx->fback);
2427                ahash_request_set_callback(subreq, req->base.flags,
2428                                           req->base.complete, req->base.data);
2429                ahash_request_set_crypt(subreq, req->src, req->result,
2430                                        req->nbytes);
2431                if (!ctx->fb_init_done) {
2432                        if (ctx->fb_do_setkey) {
2433                                /* Set fallback cipher HMAC key */
2434                                u8 key[SHA3_224_BLOCK_SIZE];
2435
2436                                memcpy(key, &ctx->base.ipad,
2437                                       crypto_ahash_blocksize(ctx->fback) / 2);
2438                                memcpy(key +
2439                                       crypto_ahash_blocksize(ctx->fback) / 2,
2440                                       &ctx->base.opad,
2441                                       crypto_ahash_blocksize(ctx->fback) / 2);
2442                                ret = crypto_ahash_setkey(ctx->fback, key,
2443                                        crypto_ahash_blocksize(ctx->fback));
2444                                memzero_explicit(key,
2445                                        crypto_ahash_blocksize(ctx->fback));
2446                                ctx->fb_do_setkey = false;
2447                        }
2448                        ret = ret ?: crypto_ahash_init(subreq);
2449                        ctx->fb_init_done = !ret; /* only mark done if init succeeded */
2450                }
2451        }
2452        return ret;
2453}
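
/*
 * The engine can only do one-shot SHA3; it apparently has no way to
 * export or reload a partial Keccak state. Any request that cannot be
 * expressed as a single init + finup is therefore redirected to a
 * software fallback ahash (ctx->fback). This helper lazily prepares the
 * fallback: for HMAC it first reassembles the raw key, which setkey
 * apparently stashed as two half-blocks in ctx->base.ipad and
 * ctx->base.opad, and feeds it to the fallback's own setkey before the
 * deferred init.
 */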
2454
2455static int safexcel_sha3_update(struct ahash_request *req)
2456{
2457        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2458        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2459        struct ahash_request *subreq = ahash_request_ctx(req);
2460
2461        ctx->do_fallback = true;
2462        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2463}
2464
2465static int safexcel_sha3_final(struct ahash_request *req)
2466{
2467        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2468        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2469        struct ahash_request *subreq = ahash_request_ctx(req);
2470
2471        ctx->do_fallback = true;
2472        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2473}
2474
2475static int safexcel_sha3_finup(struct ahash_request *req)
2476{
2477        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2478        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2479        struct ahash_request *subreq = ahash_request_ctx(req);
2480
2481        ctx->do_fallback |= !req->nbytes;
2482        if (ctx->do_fallback)
2483                /* An update or export/import happened, or len is 0: cannot use HW */
2484                return safexcel_sha3_fbcheck(req) ?:
2485                       crypto_ahash_finup(subreq);
2486        else
2487                return safexcel_ahash_finup(req);
2488}
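
/*
 * Only a clean init -> finup (or digest) of a nonzero-length message can
 * use the hardware; any update, export or import in between has already
 * set do_fallback. Zero-length input is forced onto the fallback as well,
 * since the engine cannot hash an empty message, as noted in the digest
 * handlers below.
 */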
2489
2490static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2491{
2492        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2493        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2494        struct ahash_request *subreq = ahash_request_ctx(req);
2495
2496        ctx->do_fallback = true;
2497        ctx->fb_init_done = false;
2498        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2499}
2500
2501static int safexcel_sha3_224_digest(struct ahash_request *req)
2502{
2503        if (req->nbytes)
2504                return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2505
2506        /* HW cannot do a zero-length hash, use the fallback instead */
2507        return safexcel_sha3_digest_fallback(req);
2508}
2509
2510static int safexcel_sha3_export(struct ahash_request *req, void *out)
2511{
2512        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2513        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2514        struct ahash_request *subreq = ahash_request_ctx(req);
2515
2516        ctx->do_fallback = true;
2517        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2518}
2519
2520static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2521{
2522        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2523        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2524        struct ahash_request *subreq = ahash_request_ctx(req);
2525
2526        ctx->do_fallback = true;
2527        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2529}
2530
2531static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2532{
2533        struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2534        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2535
2536        safexcel_ahash_cra_init(tfm);
2537
2538        /* Allocate fallback implementation */
2539        ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2540                                        CRYPTO_ALG_ASYNC |
2541                                        CRYPTO_ALG_NEED_FALLBACK);
2542        if (IS_ERR(ctx->fback))
2543                return PTR_ERR(ctx->fback);
2544
2545        /* Update statesize from fallback algorithm! */
2546        crypto_hash_alg_common(ahash)->statesize =
2547                crypto_ahash_statesize(ctx->fback);
2548        crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2549                                            sizeof(struct ahash_request) +
2550                                            crypto_ahash_reqsize(ctx->fback)));
2551        return 0;
2552}
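
/*
 * The advertised statesize is taken over from the fallback because
 * export and import are always routed through it, so the state blob must
 * match what the software implementation produces. The request context,
 * in turn, must be able to hold either the driver's own
 * safexcel_ahash_req or the fallback's complete ahash_request, whichever
 * is larger.
 */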
2553
2554static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2555{
2556        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2557
2558        crypto_free_ahash(ctx->fback);
2559        safexcel_ahash_cra_exit(tfm);
2560}
2561
2562struct safexcel_alg_template safexcel_alg_sha3_224 = {
2563        .type = SAFEXCEL_ALG_TYPE_AHASH,
2564        .algo_mask = SAFEXCEL_ALG_SHA3,
2565        .alg.ahash = {
2566                .init = safexcel_sha3_224_init,
2567                .update = safexcel_sha3_update,
2568                .final = safexcel_sha3_final,
2569                .finup = safexcel_sha3_finup,
2570                .digest = safexcel_sha3_224_digest,
2571                .export = safexcel_sha3_export,
2572                .import = safexcel_sha3_import,
2573                .halg = {
2574                        .digestsize = SHA3_224_DIGEST_SIZE,
2575                        .statesize = sizeof(struct safexcel_ahash_export_state),
2576                        .base = {
2577                                .cra_name = "sha3-224",
2578                                .cra_driver_name = "safexcel-sha3-224",
2579                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2580                                .cra_flags = CRYPTO_ALG_ASYNC |
2581                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2582                                             CRYPTO_ALG_NEED_FALLBACK,
2583                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
2584                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2585                                .cra_init = safexcel_sha3_cra_init,
2586                                .cra_exit = safexcel_sha3_cra_exit,
2587                                .cra_module = THIS_MODULE,
2588                        },
2589                },
2590        },
2591};
2592
2593static int safexcel_sha3_256_init(struct ahash_request *areq)
2594{
2595        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2596        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2597        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2598
2599        memset(req, 0, sizeof(*req));
2600
2601        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2602        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2603        req->state_sz = SHA3_256_DIGEST_SIZE;
2604        req->digest_sz = SHA3_256_DIGEST_SIZE;
2605        req->block_sz = SHA3_256_BLOCK_SIZE;
2606        ctx->do_fallback = false;
2607        ctx->fb_init_done = false;
2608        return 0;
2609}
2610
2611static int safexcel_sha3_256_digest(struct ahash_request *req)
2612{
2613        if (req->nbytes)
2614                return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2615
2616        /* HW cannot do a zero-length hash, use the fallback instead */
2617        return safexcel_sha3_digest_fallback(req);
2618}
2619
2620struct safexcel_alg_template safexcel_alg_sha3_256 = {
2621        .type = SAFEXCEL_ALG_TYPE_AHASH,
2622        .algo_mask = SAFEXCEL_ALG_SHA3,
2623        .alg.ahash = {
2624                .init = safexcel_sha3_256_init,
2625                .update = safexcel_sha3_update,
2626                .final = safexcel_sha3_final,
2627                .finup = safexcel_sha3_finup,
2628                .digest = safexcel_sha3_256_digest,
2629                .export = safexcel_sha3_export,
2630                .import = safexcel_sha3_import,
2631                .halg = {
2632                        .digestsize = SHA3_256_DIGEST_SIZE,
2633                        .statesize = sizeof(struct safexcel_ahash_export_state),
2634                        .base = {
2635                                .cra_name = "sha3-256",
2636                                .cra_driver_name = "safexcel-sha3-256",
2637                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2638                                .cra_flags = CRYPTO_ALG_ASYNC |
2639                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2640                                             CRYPTO_ALG_NEED_FALLBACK,
2641                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
2642                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2643                                .cra_init = safexcel_sha3_cra_init,
2644                                .cra_exit = safexcel_sha3_cra_exit,
2645                                .cra_module = THIS_MODULE,
2646                        },
2647                },
2648        },
2649};
2650
2651static int safexcel_sha3_384_init(struct ahash_request *areq)
2652{
2653        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2654        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2655        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2656
2657        memset(req, 0, sizeof(*req));
2658
2659        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2660        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2661        req->state_sz = SHA3_384_DIGEST_SIZE;
2662        req->digest_sz = SHA3_384_DIGEST_SIZE;
2663        req->block_sz = SHA3_384_BLOCK_SIZE;
2664        ctx->do_fallback = false;
2665        ctx->fb_init_done = false;
2666        return 0;
2667}
2668
2669static int safexcel_sha3_384_digest(struct ahash_request *req)
2670{
2671        if (req->nbytes)
2672                return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2673
2674        /* HW cannot do zero length hash, use fallback instead */
2675        return safexcel_sha3_digest_fallback(req);
2676}
2677
2678struct safexcel_alg_template safexcel_alg_sha3_384 = {
2679        .type = SAFEXCEL_ALG_TYPE_AHASH,
2680        .algo_mask = SAFEXCEL_ALG_SHA3,
2681        .alg.ahash = {
2682                .init = safexcel_sha3_384_init,
2683                .update = safexcel_sha3_update,
2684                .final = safexcel_sha3_final,
2685                .finup = safexcel_sha3_finup,
2686                .digest = safexcel_sha3_384_digest,
2687                .export = safexcel_sha3_export,
2688                .import = safexcel_sha3_import,
2689                .halg = {
2690                        .digestsize = SHA3_384_DIGEST_SIZE,
2691                        .statesize = sizeof(struct safexcel_ahash_export_state),
2692                        .base = {
2693                                .cra_name = "sha3-384",
2694                                .cra_driver_name = "safexcel-sha3-384",
2695                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2696                                .cra_flags = CRYPTO_ALG_ASYNC |
2697                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2698                                             CRYPTO_ALG_NEED_FALLBACK,
2699                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
2700                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2701                                .cra_init = safexcel_sha3_cra_init,
2702                                .cra_exit = safexcel_sha3_cra_exit,
2703                                .cra_module = THIS_MODULE,
2704                        },
2705                },
2706        },
2707};
2708
2709static int safexcel_sha3_512_init(struct ahash_request *areq)
2710{
2711        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2712        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2713        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2714
2715        memset(req, 0, sizeof(*req));
2716
2717        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2718        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2719        req->state_sz = SHA3_512_DIGEST_SIZE;
2720        req->digest_sz = SHA3_512_DIGEST_SIZE;
2721        req->block_sz = SHA3_512_BLOCK_SIZE;
2722        ctx->do_fallback = false;
2723        ctx->fb_init_done = false;
2724        return 0;
2725}
2726
2727static int safexcel_sha3_512_digest(struct ahash_request *req)
2728{
2729        if (req->nbytes)
2730                return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2731
2732        /* HW cannot do zero length hash, use fallback instead */
2733        return safexcel_sha3_digest_fallback(req);
2734}
2735
2736struct safexcel_alg_template safexcel_alg_sha3_512 = {
2737        .type = SAFEXCEL_ALG_TYPE_AHASH,
2738        .algo_mask = SAFEXCEL_ALG_SHA3,
2739        .alg.ahash = {
2740                .init = safexcel_sha3_512_init,
2741                .update = safexcel_sha3_update,
2742                .final = safexcel_sha3_final,
2743                .finup = safexcel_sha3_finup,
2744                .digest = safexcel_sha3_512_digest,
2745                .export = safexcel_sha3_export,
2746                .import = safexcel_sha3_import,
2747                .halg = {
2748                        .digestsize = SHA3_512_DIGEST_SIZE,
2749                        .statesize = sizeof(struct safexcel_ahash_export_state),
2750                        .base = {
2751                                .cra_name = "sha3-512",
2752                                .cra_driver_name = "safexcel-sha3-512",
2753                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2754                                .cra_flags = CRYPTO_ALG_ASYNC |
2755                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2756                                             CRYPTO_ALG_NEED_FALLBACK,
2757                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
2758                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2759                                .cra_init = safexcel_sha3_cra_init,
2760                                .cra_exit = safexcel_sha3_cra_exit,
2761                                .cra_module = THIS_MODULE,
2762                        },
2763                },
2764        },
2765};
2766
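/*
 * HMAC-SHA3 additionally keeps a synchronous software digest ("shpre")
 * plus a preallocated descriptor for it: setkey() uses the pair to
 * compress keys longer than one block down to a single digest before
 * loading them into the ipad/opad halves of the engine context.
 */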
2767static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2768{
2769        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2770        int ret;
2771
2772        ret = safexcel_sha3_cra_init(tfm);
2773        if (ret)
2774                return ret;
2775
2776        /* Allocate precalc basic digest implementation */
2777        ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2778        if (IS_ERR(ctx->shpre))
2779                return PTR_ERR(ctx->shpre);
2780
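        /*
         * The descriptor needs room for the shash_desc header plus the
         * implementation-specific state reported by crypto_shash_descsize().
         */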
2781        ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2782                              crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2783        if (!ctx->shdesc) {
2784                crypto_free_shash(ctx->shpre);
2785                return -ENOMEM;
2786        }
2787        ctx->shdesc->tfm = ctx->shpre;
2788        return 0;
2789}
2790
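/*
 * Undo safexcel_hmac_sha3_cra_init(): drop the fallback ahash, then the
 * precalc shash and its on-heap descriptor, before releasing the base
 * hash context.
 */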
2791static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2792{
2793        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2794
2795        crypto_free_ahash(ctx->fback);
2796        crypto_free_shash(ctx->shpre);
2797        kfree(ctx->shdesc);
2798        safexcel_ahash_cra_exit(tfm);
2799}
2800
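/*
 * Key layout: the engine context stores the HMAC key as two half-block
 * buffers, ipad and opad. For hmac(sha3-256), for example, the Keccak
 * rate (block size) is 136 bytes, so each half holds 68 bytes; a key
 * longer than 136 bytes is first reduced to its 32-byte sha3-256
 * digest via shpre.
 */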
2801static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2802                                     unsigned int keylen)
2803{
2804        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2805        int ret = 0;
2806
2807        if (keylen > crypto_ahash_blocksize(tfm)) {
2808                /*
2809                 * If the key is larger than the blocksize, then hash it
2810                 * first using our software fallback digest
2811                 */
2812                ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2813                                          ctx->base.ipad.byte);
2814                keylen = crypto_shash_digestsize(ctx->shpre);
2815
2816                /*
2817                 * If the digest is larger than half the blocksize, we need to
2818                 * move the rest to opad due to the way our HMAC infra works.
2819                 */
2820                if (keylen > crypto_ahash_blocksize(tfm) / 2)
2821                        /* Buffers overlap, need to use memmove instead of memcpy! */
2822                        memmove(&ctx->base.opad,
2823                                ctx->base.ipad.byte +
2824                                        crypto_ahash_blocksize(tfm) / 2,
2825                                keylen - crypto_ahash_blocksize(tfm) / 2);
2826        } else {
2827                /*
2828                 * Copy the key to our ipad & opad buffers
2829                 * Note that ipad and opad each contain one half of the key,
2830                 * to match the existing HMAC driver infrastructure.
2831                 */
2832                if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2833                        memcpy(&ctx->base.ipad, key, keylen);
2834                } else {
2835                        memcpy(&ctx->base.ipad, key,
2836                               crypto_ahash_blocksize(tfm) / 2);
2837                        memcpy(&ctx->base.opad,
2838                               key + crypto_ahash_blocksize(tfm) / 2,
2839                               keylen - crypto_ahash_blocksize(tfm) / 2);
2840                }
2841        }
2842
2843        /* Pad key with zeroes */
2844        if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2845                memset(ctx->base.ipad.byte + keylen, 0,
2846                       crypto_ahash_blocksize(tfm) / 2 - keylen);
2847                memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2848        } else {
2849                memset(ctx->base.opad.byte + keylen -
2850                       crypto_ahash_blocksize(tfm) / 2, 0,
2851                       crypto_ahash_blocksize(tfm) - keylen);
2852        }
2853
2854        /* If doing fallback, still need to set the new key! */
2855        ctx->fb_do_setkey = true;
2856        return ret;
2857}
2858
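/*
 * The HMAC variants preload the request state with the ipad half of
 * the key and preset len == processed == one block, so the engine's
 * length bookkeeping includes the key block that precedes the message.
 * state_sz is therefore blocksize/2 (the stored key half), while
 * digest_sz remains the SHA3 output size.
 */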
2859static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2860{
2861        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2862        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2863        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2864
2865        memset(req, 0, sizeof(*req));
2866
2867        /* Copy (half of) the key */
2868        memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2869        /* Start of HMAC should have len == processed == blocksize */
2870        req->len        = SHA3_224_BLOCK_SIZE;
2871        req->processed  = SHA3_224_BLOCK_SIZE;
2872        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2873        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2874        req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2875        req->digest_sz = SHA3_224_DIGEST_SIZE;
2876        req->block_sz = SHA3_224_BLOCK_SIZE;
2877        req->hmac = true;
2878        ctx->do_fallback = false;
2879        ctx->fb_init_done = false;
2880        return 0;
2881}
2882
2883static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2884{
2885        if (req->nbytes)
2886                return safexcel_hmac_sha3_224_init(req) ?:
2887                       safexcel_ahash_finup(req);
2888
2889        /* HW cannot do zero length HMAC, use fallback instead */
2890        return safexcel_sha3_digest_fallback(req);
2891}
2892
2893static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2894{
2895        return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2896}
2897
2898struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2899        .type = SAFEXCEL_ALG_TYPE_AHASH,
2900        .algo_mask = SAFEXCEL_ALG_SHA3,
2901        .alg.ahash = {
2902                .init = safexcel_hmac_sha3_224_init,
2903                .update = safexcel_sha3_update,
2904                .final = safexcel_sha3_final,
2905                .finup = safexcel_sha3_finup,
2906                .digest = safexcel_hmac_sha3_224_digest,
2907                .setkey = safexcel_hmac_sha3_setkey,
2908                .export = safexcel_sha3_export,
2909                .import = safexcel_sha3_import,
2910                .halg = {
2911                        .digestsize = SHA3_224_DIGEST_SIZE,
2912                        .statesize = sizeof(struct safexcel_ahash_export_state),
2913                        .base = {
2914                                .cra_name = "hmac(sha3-224)",
2915                                .cra_driver_name = "safexcel-hmac-sha3-224",
2916                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2917                                .cra_flags = CRYPTO_ALG_ASYNC |
2918                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2919                                             CRYPTO_ALG_NEED_FALLBACK,
2920                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
2921                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2922                                .cra_init = safexcel_hmac_sha3_224_cra_init,
2923                                .cra_exit = safexcel_hmac_sha3_cra_exit,
2924                                .cra_module = THIS_MODULE,
2925                        },
2926                },
2927        },
2928};
2929
2930static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2931{
2932        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2933        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2934        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2935
2936        memset(req, 0, sizeof(*req));
2937
2938        /* Copy (half of) the key */
2939        memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2940        /* Start of HMAC should have len == processed == blocksize */
2941        req->len        = SHA3_256_BLOCK_SIZE;
2942        req->processed  = SHA3_256_BLOCK_SIZE;
2943        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2944        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2945        req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2946        req->digest_sz = SHA3_256_DIGEST_SIZE;
2947        req->block_sz = SHA3_256_BLOCK_SIZE;
2948        req->hmac = true;
2949        ctx->do_fallback = false;
2950        ctx->fb_init_done = false;
2951        return 0;
2952}
2953
2954static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2955{
2956        if (req->nbytes)
2957                return safexcel_hmac_sha3_256_init(req) ?:
2958                       safexcel_ahash_finup(req);
2959
2960        /* HW cannot do zero length HMAC, use fallback instead */
2961        return safexcel_sha3_digest_fallback(req);
2962}
2963
2964static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2965{
2966        return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2967}
2968
2969struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2970        .type = SAFEXCEL_ALG_TYPE_AHASH,
2971        .algo_mask = SAFEXCEL_ALG_SHA3,
2972        .alg.ahash = {
2973                .init = safexcel_hmac_sha3_256_init,
2974                .update = safexcel_sha3_update,
2975                .final = safexcel_sha3_final,
2976                .finup = safexcel_sha3_finup,
2977                .digest = safexcel_hmac_sha3_256_digest,
2978                .setkey = safexcel_hmac_sha3_setkey,
2979                .export = safexcel_sha3_export,
2980                .import = safexcel_sha3_import,
2981                .halg = {
2982                        .digestsize = SHA3_256_DIGEST_SIZE,
2983                        .statesize = sizeof(struct safexcel_ahash_export_state),
2984                        .base = {
2985                                .cra_name = "hmac(sha3-256)",
2986                                .cra_driver_name = "safexcel-hmac-sha3-256",
2987                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2988                                .cra_flags = CRYPTO_ALG_ASYNC |
2989                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2990                                             CRYPTO_ALG_NEED_FALLBACK,
2991                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
2992                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2993                                .cra_init = safexcel_hmac_sha3_256_cra_init,
2994                                .cra_exit = safexcel_hmac_sha3_cra_exit,
2995                                .cra_module = THIS_MODULE,
2996                        },
2997                },
2998        },
2999};
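/*
 * Usage sketch (illustrative only, not part of the driver): once the
 * template above is registered, kernel users reach it through the
 * generic ahash API. The buffer names and the omitted error handling
 * are assumptions for brevity; a real caller must also wait for
 * completion when crypto_ahash_digest() returns -EINPROGRESS or -EBUSY.
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *
 *	tfm = crypto_alloc_ahash("hmac(sha3-256)", 0, 0);
 *	crypto_ahash_setkey(tfm, key, keylen);
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   done_cb, &done_ctx);
 *	ahash_request_set_crypt(req, src_sgl, out_digest, src_len);
 *	crypto_ahash_digest(req);
 */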
3000
3001static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
3002{
3003        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3004        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3005        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3006
3007        memset(req, 0, sizeof(*req));
3008
3009        /* Copy (half of) the key */
3010        memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
3011        /* Start of HMAC should have len == processed == blocksize */
3012        req->len        = SHA3_384_BLOCK_SIZE;
3013        req->processed  = SHA3_384_BLOCK_SIZE;
3014        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
3015        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3016        req->state_sz = SHA3_384_BLOCK_SIZE / 2;
3017        req->digest_sz = SHA3_384_DIGEST_SIZE;
3018        req->block_sz = SHA3_384_BLOCK_SIZE;
3019        req->hmac = true;
3020        ctx->do_fallback = false;
3021        ctx->fb_init_done = false;
3022        return 0;
3023}
3024
3025static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3026{
3027        if (req->nbytes)
3028                return safexcel_hmac_sha3_384_init(req) ?:
3029                       safexcel_ahash_finup(req);
3030
3031        /* HW cannot do zero length HMAC, use fallback instead */
3032        return safexcel_sha3_digest_fallback(req);
3033}
3034
3035static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
3036{
3037        return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
3038}
3039
3040struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
3041        .type = SAFEXCEL_ALG_TYPE_AHASH,
3042        .algo_mask = SAFEXCEL_ALG_SHA3,
3043        .alg.ahash = {
3044                .init = safexcel_hmac_sha3_384_init,
3045                .update = safexcel_sha3_update,
3046                .final = safexcel_sha3_final,
3047                .finup = safexcel_sha3_finup,
3048                .digest = safexcel_hmac_sha3_384_digest,
3049                .setkey = safexcel_hmac_sha3_setkey,
3050                .export = safexcel_sha3_export,
3051                .import = safexcel_sha3_import,
3052                .halg = {
3053                        .digestsize = SHA3_384_DIGEST_SIZE,
3054                        .statesize = sizeof(struct safexcel_ahash_export_state),
3055                        .base = {
3056                                .cra_name = "hmac(sha3-384)",
3057                                .cra_driver_name = "safexcel-hmac-sha3-384",
3058                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
3059                                .cra_flags = CRYPTO_ALG_ASYNC |
3060                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
3061                                             CRYPTO_ALG_NEED_FALLBACK,
3062                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
3063                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3064                                .cra_init = safexcel_hmac_sha3_384_cra_init,
3065                                .cra_exit = safexcel_hmac_sha3_cra_exit,
3066                                .cra_module = THIS_MODULE,
3067                        },
3068                },
3069        },
3070};
3071
3072static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3073{
3074        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3075        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3076        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3077
3078        memset(req, 0, sizeof(*req));
3079
3080        /* Copy (half of) the key */
3081        memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
3082        /* Start of HMAC should have len == processed == blocksize */
3083        req->len        = SHA3_512_BLOCK_SIZE;
3084        req->processed  = SHA3_512_BLOCK_SIZE;
3085        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3086        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3087        req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3088        req->digest_sz = SHA3_512_DIGEST_SIZE;
3089        req->block_sz = SHA3_512_BLOCK_SIZE;
3090        req->hmac = true;
3091        ctx->do_fallback = false;
3092        ctx->fb_init_done = false;
3093        return 0;
3094}
3095
3096static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3097{
3098        if (req->nbytes)
3099                return safexcel_hmac_sha3_512_init(req) ?:
3100                       safexcel_ahash_finup(req);
3101
3102        /* HW cannot do zero length HMAC, use fallback instead */
3103        return safexcel_sha3_digest_fallback(req);
3104}
3105
3106static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3107{
3108        return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3109        }

3110struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3111        .type = SAFEXCEL_ALG_TYPE_AHASH,
3112        .algo_mask = SAFEXCEL_ALG_SHA3,
3113        .alg.ahash = {
3114                .init = safexcel_hmac_sha3_512_init,
3115                .update = safexcel_sha3_update,
3116                .final = safexcel_sha3_final,
3117                .finup = safexcel_sha3_finup,
3118                .digest = safexcel_hmac_sha3_512_digest,
3119                .setkey = safexcel_hmac_sha3_setkey,
3120                .export = safexcel_sha3_export,
3121                .import = safexcel_sha3_import,
3122                .halg = {
3123                        .digestsize = SHA3_512_DIGEST_SIZE,
3124                        .statesize = sizeof(struct safexcel_ahash_export_state),
3125                        .base = {
3126                                .cra_name = "hmac(sha3-512)",
3127                                .cra_driver_name = "safexcel-hmac-sha3-512",
3128                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
3129                                .cra_flags = CRYPTO_ALG_ASYNC |
3130                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
3131                                             CRYPTO_ALG_NEED_FALLBACK,
3132                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
3133                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3134                                .cra_init = safexcel_hmac_sha3_512_cra_init,
3135                                .cra_exit = safexcel_hmac_sha3_cra_exit,
3136                                .cra_module = THIS_MODULE,
3137                        },
3138                },
3139        },
3140};
3141