linux/drivers/crypto/inside-secure/safexcel_hash.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 alg;
        u8  key_sz;
        bool cbcmac;
        bool do_fallback;
        bool fb_init_done;
        bool fb_do_setkey;

        __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
        __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];

        struct crypto_cipher *kaes;
        struct crypto_ahash *fback;
        struct crypto_shash *shpre;
        struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;
        bool hmac_zlen;
        bool len_is_le;
        bool not_first;
        bool xcbcmac;

        int nents;
        dma_addr_t result_dma;

        u32 digest;

        u8 state_sz;    /* expected state size, only set once */
        u8 block_sz;    /* block size, only set once */
        u8 digest_sz;   /* output digest size, only set once */
        __le32 state[SHA3_512_BLOCK_SIZE /
                     sizeof(__le32)] __aligned(sizeof(__le32));

        u64 len;
        u64 processed;

        u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        return req->len - req->processed;
}
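
/*
 * Worked example (illustrative): if update() has accepted 100 bytes in
 * total (req->len == 100) and 64 of them have already been pushed to the
 * engine (req->processed == 64), safexcel_queued_len() returns 36: the
 * bytes still sitting in the driver, cached or pending for the next send.
 */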

static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length,
                                bool cbcmac)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        input_length &= 15;
        if (unlikely(cbcmac && input_length)) {
                token[0].stat = 0;
                token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
                token[1].packet_length = 16 - input_length;
                token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
                token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
        } else {
                token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
                eip197_noop_token(&token[1]);
        }

        token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[2].packet_length = result_length;
        token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

        eip197_noop_token(&token[3]);
}
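
/*
 * Illustrative token layout (example values assumed): hashing a 30-byte
 * CBC-MAC input with a 16-byte result gives
 *
 *   token[0]: DIRECTION, packet_length = 30, INS_TYPE_HASH
 *   token[1]: INSERT, packet_length = 16 - (30 & 15) = 2 zero-pad bytes,
 *             LAST_HASH
 *   token[2]: INSERT, packet_length = 16, output the digest,
 *             LAST_HASH | LAST_PACKET
 *   token[3]: NOP
 *
 * For block-aligned or non-CBC-MAC input, token[1] becomes the NOP and
 * token[0] carries LAST_HASH itself.
 */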

static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->priv;
        u64 count = 0;

        cdesc->control_data.control0 = ctx->alg;
        cdesc->control_data.control1 = 0;

        /*
         * Copy the input digest if needed, and setup the context
         * fields. Do this now as we need it to setup the first command
         * descriptor.
         */
        if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
                if (req->xcbcmac)
                        memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
                else
                        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

                if (!req->finish && req->xcbcmac)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_DIGEST_XCM |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_SIZE(req->state_sz /
                                                     sizeof(u32));
                return;
        } else if (!req->processed) {
                /* First - and possibly only - block of basic hash only */
                if (req->finish)
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_RESTART_HASH  |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                else
                        cdesc->control_data.control0 |= req->digest |
                                CONTEXT_CONTROL_TYPE_HASH_OUT  |
                                CONTEXT_CONTROL_RESTART_HASH   |
                                CONTEXT_CONTROL_NO_FINISH_HASH |
                                /* ensure it's not 0! */
                                CONTEXT_CONTROL_SIZE(1);
                return;
        }

        /* Hash continuation or HMAC, setup (inner) digest from state */
        memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

        if (req->finish) {
                /* Compute digest count for hash/HMAC finish operations */
                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    req->hmac_zlen || (req->processed != req->block_sz)) {
                        count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

                        /* This is a hardware limitation, as the
                         * counter must fit into a u32. This represents
                         * a fairly big amount of input data, so we
                         * shouldn't see this.
                         */
                        if (unlikely(count & 0xffffffff00000000ULL)) {
                                dev_warn(priv->dev,
                                         "Input data is too big\n");
                                return;
                        }
                }

                if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
                    /* Special case: zero length HMAC */
                    req->hmac_zlen ||
                    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
                    (req->processed != req->block_sz)) {
                        /* Basic hash continue operation, need digest + cnt */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
                        /* For zero-len HMAC, don't finalize, already padded! */
                        if (req->hmac_zlen)
                                cdesc->control_data.control0 |=
                                        CONTEXT_CONTROL_NO_FINISH_HASH;
                        cdesc->control_data.control1 |=
                                CONTEXT_CONTROL_DIGEST_CNT;
                        ctx->base.ctxr->data[req->state_sz >> 2] =
                                cpu_to_le32(count);
                        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

                        /* Clear zero-length HMAC flag for next operation! */
                        req->hmac_zlen = false;
                } else { /* HMAC */
                        /* Need outer digest for HMAC finalization */
                        memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
                               ctx->opad, req->state_sz);

                        /* Single pass HMAC - no digest count */
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
                                CONTEXT_CONTROL_TYPE_HASH_OUT |
                                CONTEXT_CONTROL_DIGEST_HMAC;
                }
        } else { /* Hash continuation, do not finish yet */
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
                        CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
                        CONTEXT_CONTROL_TYPE_HASH_OUT |
                        CONTEXT_CONTROL_NO_FINISH_HASH;
        }
}
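
/*
 * Informal summary: safexcel_context_control() selects one of four context
 * setups - XCM (CBC-MAC/XCBC/CMAC) with key or state loaded into the
 * context record, a restarted hash for the first block, a "precomputed
 * digest + count" continuation (also used to fake HMAC finish on PE HW
 * < 4.4 and for zero-length HMAC), or a true single-pass HMAC finish with
 * both inner and outer digests in the context.
 */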

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
                sreq->cache_sz = 0;
        }

        if (sreq->finish) {
                if (sreq->hmac &&
                    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
                        /* Faking HMAC using hash - need to do outer hash */
                        memcpy(sreq->cache, sreq->state,
                               crypto_ahash_digestsize(ahash));

                        memcpy(sreq->state, ctx->opad, sreq->digest_sz);

                        sreq->len = sreq->block_sz +
                                    crypto_ahash_digestsize(ahash);
                        sreq->processed = sreq->block_sz;
                        sreq->hmac = 0;

                        if (priv->flags & EIP197_TRC_CACHE)
                                ctx->base.needs_inv = true;
                        areq->nbytes = 0;
                        safexcel_ahash_enqueue(areq);

                        *should_complete = false; /* Not done yet */
                        return 1;
                }

                if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                             ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
                        /* Undo final XOR with 0xffffffff ... */
                        *(__le32 *)areq->result = ~sreq->state[0];
                } else {
                        memcpy(areq->result, sreq->state,
                               crypto_ahash_digestsize(ahash));
                }
        }

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        struct safexcel_token *dmmy;
        int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
        u64 queued, len;

        queued = safexcel_queued_len(req);
        if (queued <= HASH_CACHE_SIZE)
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->finish && !req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full cache blocks, cache it for the next send call.
                 */
                extra = queued & (HASH_CACHE_SIZE - 1);

                /* If this is not the last request and the queued data
                 * is a multiple of a block, cache the last one for now.
                 */
                if (!extra)
                        extra = HASH_CACHE_SIZE;

                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache_next, extra,
                                   areq->nbytes - extra);

                queued -= extra;

                if (!queued) {
                        *commands = 0;
                        *results = 0;
                        return 0;
                }

                extra = 0;
        }

        if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
                if (unlikely(cache_len < AES_BLOCK_SIZE)) {
                        /*
                         * Cache contains less than one full block:
                         * pad it out to a complete final block.
                         */
                        extra = AES_BLOCK_SIZE - cache_len;
                        if (queued > cache_len) {
                                /* More data follows: borrow bytes */
                                u64 tmp = queued - cache_len;

                                skip = min_t(u64, tmp, extra);
                                sg_pcopy_to_buffer(areq->src,
                                        sg_nents(areq->src),
                                        req->cache + cache_len,
                                        skip, 0);
                        }
                        extra -= skip;
                        memset(req->cache + cache_len + skip, 0, extra);
                        if (!ctx->cbcmac && extra) {
                                // 10* padding (one 1 bit, then zeros) for XCBC-MAC & CMAC
                                req->cache[cache_len + skip] = 0x80;
                                // HW will use K2 instead of K3 - compensate!
                                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                                        ((__be32 *)req->cache)[i] ^=
                                          cpu_to_be32(le32_to_cpu(
                                            ctx->ipad[i] ^ ctx->ipad[i + 4]));
                        }
                        cache_len = AES_BLOCK_SIZE;
                        queued = queued + extra;
                }

                /* XCBC continue: XOR the previous engine result into the cached block */
                crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
        }

        len = queued;
        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len,
                                                 len, ctx->base.ctxr_dma,
                                                 &dmmy);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src,
                                sg_nents_for_len(areq->src,
                                                 areq->nbytes),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                if (unlikely(sglen <= skip)) {
                        skip -= sglen;
                        continue;
                }

                /* Do not overflow the request */
                if ((queued + skip) <= sglen)
                        sglen = queued;
                else
                        sglen -= skip;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen),
                                           sg_dma_address(sg) + skip, sglen,
                                           len, ctx->base.ctxr_dma, &dmmy);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;
                n_cdesc++;

                queued -= sglen;
                if (!queued)
                        break;
                skip = 0;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

        req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->digest_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed += len - extra;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        if (req->nents) {
                dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
                req->nents = 0;
        }
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_dma = 0;
                req->cache_sz = 0;
        }

        return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, EIP197_AHASH_REQ_SIZE);

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block worth of data in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        u64 cache_len;

        /* cache_len: everything accepted by the driver but not sent yet,
         * tot sz handled by update() - last req sz - tot sz handled by send()
         */
        cache_len = safexcel_queued_len(req);

        /*
         * In case there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return 0;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}
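
/*
 * Worked example (purely for illustration, suppose HASH_CACHE_SIZE were
 * 64 bytes): with 20 bytes already cached, an update() of 30 bytes still
 * fits (20 + 30 <= 64), so the data is copied into req->cache and 0 is
 * returned. A further update() of 40 bytes would not fit (50 + 40 > 64);
 * -E2BIG is returned and the caller pushes the request to the engine
 * instead.
 */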

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                    /* invalidate for *any* non-XCBC continuation */
                    ((req->not_first && !req->xcbcmac) ||
                     /* invalidate if (i)digest changed */
                     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
                     /* invalidate for HMAC finish with odigest changed */
                     (req->finish && req->hmac &&
                      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
                             ctx->opad, req->state_sz))))
                        /*
                         * We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = true;

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }
        req->not_first = true;

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        /* Add request to the cache if it fits */
        ret = safexcel_ahash_cache(areq);

        /* Update total request length */
        req->len += areq->nbytes;

        /* If not all data could fit into the cache, go process the excess.
         * Also go process immediately for an HMAC IV precompute, which
         * will never be finished at all, but needs to be processed anyway.
         */
        if ((ret && !req->finish) || req->last_req)
                return safexcel_ahash_enqueue(areq);

        return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->finish = true;

        if (unlikely(!req->len && !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *hash* request:
                 * The HW cannot do 0 length hash, so we provide the correct
                 * result directly here.
                 */
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
                        memcpy(areq->result,
                               EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
                }

                return 0;
        } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
                            ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
                            req->len == sizeof(u32) && !areq->nbytes)) {
                /* Zero length CRC32 */
                memcpy(areq->result, ctx->ipad, sizeof(u32));
                return 0;
        } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length CBC MAC */
                memset(areq->result, 0, AES_BLOCK_SIZE);
                return 0;
        } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
                            !areq->nbytes)) {
                /* Zero length (X)CBC/CMAC */
                int i;

                for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
                        ((__be32 *)areq->result)[i] =
                                cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4])); // K3
                areq->result[0] ^= 0x80;                        // 10* padding
                crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
                return 0;
        } else if (unlikely(req->hmac &&
                            (req->len == req->block_sz) &&
                            !areq->nbytes)) {
                /*
                 * If we have an overall 0 length *HMAC* request:
                 * For HMAC, we need to finalize the inner digest
                 * and then perform the outer hash.
                 */

                /* generate pad block in the cache */
                /* start with a hash block of all zeroes */
                memset(req->cache, 0, req->block_sz);
                /* set the first byte to 0x80 to 'append a 1 bit' */
                req->cache[0] = 0x80;
                /* add the length in bits in the last 2 bytes */
                if (req->len_is_le) {
                        /* Little endian length word (e.g. MD5) */
                        req->cache[req->block_sz - 8] = (req->block_sz << 3) &
                                                        255;
                        req->cache[req->block_sz - 7] = (req->block_sz >> 5);
                } else {
                        /* Big endian length word (e.g. any SHA) */
                        req->cache[req->block_sz - 2] = (req->block_sz >> 5);
                        req->cache[req->block_sz - 1] = (req->block_sz << 3) &
                                                        255;
                }

                req->len += req->block_sz; /* plus 1 hash block */

                /* Set special zero-length HMAC flag */
                req->hmac_zlen = true;

                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        } else if (req->hmac) {
                /* Finalize HMAC */
                req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        }

        return safexcel_ahash_enqueue(areq);
}
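
/*
 * Example of the zero-length HMAC pad block built above (illustrative, for
 * a 64-byte block size with a big-endian length word, e.g. SHA-1/SHA-256):
 * byte 0 is 0x80, bytes 1..61 are zero, and bytes 62..63 hold the bit
 * length of the one processed block, 64 << 3 = 512 = 0x0200. Because
 * hmac_zlen suppresses hardware finalization, the engine hashes this
 * pre-padded block as-is.
 */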

static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;

        export->len = req->len;
        export->processed = req->processed;

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

        return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len = export->len;
        req->processed = export->processed;

        req->digest = export->digest;

        memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;
        ctx->fb_do_setkey = false;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len        = SHA1_BLOCK_SIZE;
        req->processed  = SHA1_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;
        req->digest_sz = SHA1_DIGEST_SIZE;
        req->block_sz = SHA1_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
        struct completion completion;
        int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                memzero_explicit(keydup, keylen);
                kfree(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}
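
/*
 * This is the standard HMAC key schedule (RFC 2104): a key longer than the
 * block size is first hashed down to the digest size, the key is then
 * zero-padded to a full block, and ipad/opad are derived by XORing every
 * byte with HMAC_IPAD_VALUE (0x36) and HMAC_OPAD_VALUE (0x5c) respectively.
 */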

static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}
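
/*
 * Note: this pushes exactly one block (key XOR pad) through the hash with
 * last_req set, so safexcel_ahash_update() forces it to the engine even
 * though no final() ever runs. The exported partial state is the
 * precomputed inner or outer digest that later gets loaded into the
 * context record.
 */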

int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                         void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}
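
/*
 * Usage sketch (informal): safexcel_hmac_alg_setkey() below invokes this
 * with the driver's own basic hash (e.g. "safexcel-sha1"), so istate and
 * ostate end up holding H(key ^ ipad) and H(key ^ opad). These precomputes
 * are what let the engine perform HMAC in a single pass without ever
 * seeing the raw key again.
 */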

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                                    unsigned int keylen, const char *alg,
                                    unsigned int state_sz)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_ahash_export_state istate, ostate;
        int ret;

        ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
            (memcmp(ctx->ipad, istate.state, state_sz) ||
             memcmp(ctx->opad, ostate.state, state_sz)))
                ctx->base.needs_inv = true;

        memcpy(ctx->ipad, &istate.state, state_sz);
        memcpy(ctx->opad, &ostate.state, state_sz);

        return 0;
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
                                        SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA1,
        .alg.ahash = {
                .init = safexcel_hmac_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha1_digest,
                .setkey = safexcel_hmac_sha1_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha1)",
                                .cra_driver_name = "safexcel-hmac-sha1",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha256_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA2_256,
        .alg.ahash = {
                .init = safexcel_sha256_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha256_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha256",
                                .cra_driver_name = "safexcel-sha256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;

        return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA2_256,
        .alg.ahash = {
                .init = safexcel_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha224_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha224",
                                .cra_driver_name = "safexcel-sha224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
                                        SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len        = SHA256_BLOCK_SIZE;
        req->processed  = SHA256_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;
        req->digest_sz = SHA256_DIGEST_SIZE;
        req->block_sz = SHA256_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA2_256,
        .alg.ahash = {
                .init = safexcel_hmac_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha224_digest,
                .setkey = safexcel_hmac_sha224_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha224)",
                                .cra_driver_name = "safexcel-hmac-sha224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
                                        SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1447        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1448
1449        memset(req, 0, sizeof(*req));
1450
1451        /* Start from ipad precompute */
1452        memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1453        /* Already processed the key^ipad part now! */
1454        req->len        = SHA256_BLOCK_SIZE;
1455        req->processed  = SHA256_BLOCK_SIZE;
1456
1457        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1458        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1459        req->state_sz = SHA256_DIGEST_SIZE;
1460        req->digest_sz = SHA256_DIGEST_SIZE;
1461        req->block_sz = SHA256_BLOCK_SIZE;
1462        req->hmac = true;
1463
1464        return 0;
1465}
1466
1467static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1468{
1469        int ret = safexcel_hmac_sha256_init(areq);
1470
1471        if (ret)
1472                return ret;
1473
1474        return safexcel_ahash_finup(areq);
1475}
1476
1477struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1478        .type = SAFEXCEL_ALG_TYPE_AHASH,
1479        .algo_mask = SAFEXCEL_ALG_SHA2_256,
1480        .alg.ahash = {
1481                .init = safexcel_hmac_sha256_init,
1482                .update = safexcel_ahash_update,
1483                .final = safexcel_ahash_final,
1484                .finup = safexcel_ahash_finup,
1485                .digest = safexcel_hmac_sha256_digest,
1486                .setkey = safexcel_hmac_sha256_setkey,
1487                .export = safexcel_ahash_export,
1488                .import = safexcel_ahash_import,
1489                .halg = {
1490                        .digestsize = SHA256_DIGEST_SIZE,
1491                        .statesize = sizeof(struct safexcel_ahash_export_state),
1492                        .base = {
1493                                .cra_name = "hmac(sha256)",
1494                                .cra_driver_name = "safexcel-hmac-sha256",
1495                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1496                                .cra_flags = CRYPTO_ALG_ASYNC |
1497                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1498                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1499                                .cra_blocksize = SHA256_BLOCK_SIZE,
1500                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1501                                .cra_init = safexcel_ahash_cra_init,
1502                                .cra_exit = safexcel_ahash_cra_exit,
1503                                .cra_module = THIS_MODULE,
1504                        },
1505                },
1506        },
1507};
1508
1509static int safexcel_sha512_init(struct ahash_request *areq)
1510{
1511        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1512        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1513
1514        memset(req, 0, sizeof(*req));
1515
1516        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1517        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1518        req->state_sz = SHA512_DIGEST_SIZE;
1519        req->digest_sz = SHA512_DIGEST_SIZE;
1520        req->block_sz = SHA512_BLOCK_SIZE;
1521
1522        return 0;
1523}
1524
1525static int safexcel_sha512_digest(struct ahash_request *areq)
1526{
1527        int ret = safexcel_sha512_init(areq);
1528
1529        if (ret)
1530                return ret;
1531
1532        return safexcel_ahash_finup(areq);
1533}
1534
1535struct safexcel_alg_template safexcel_alg_sha512 = {
1536        .type = SAFEXCEL_ALG_TYPE_AHASH,
1537        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1538        .alg.ahash = {
1539                .init = safexcel_sha512_init,
1540                .update = safexcel_ahash_update,
1541                .final = safexcel_ahash_final,
1542                .finup = safexcel_ahash_finup,
1543                .digest = safexcel_sha512_digest,
1544                .export = safexcel_ahash_export,
1545                .import = safexcel_ahash_import,
1546                .halg = {
1547                        .digestsize = SHA512_DIGEST_SIZE,
1548                        .statesize = sizeof(struct safexcel_ahash_export_state),
1549                        .base = {
1550                                .cra_name = "sha512",
1551                                .cra_driver_name = "safexcel-sha512",
1552                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1553                                .cra_flags = CRYPTO_ALG_ASYNC |
1554                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1555                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1556                                .cra_blocksize = SHA512_BLOCK_SIZE,
1557                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1558                                .cra_init = safexcel_ahash_cra_init,
1559                                .cra_exit = safexcel_ahash_cra_exit,
1560                                .cra_module = THIS_MODULE,
1561                        },
1562                },
1563        },
1564};
1565
1566static int safexcel_sha384_init(struct ahash_request *areq)
1567{
1568        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1569        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1570
1571        memset(req, 0, sizeof(*req));
1572
1573        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1574        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1575        req->state_sz = SHA512_DIGEST_SIZE;
1576        req->digest_sz = SHA512_DIGEST_SIZE;
1577        req->block_sz = SHA512_BLOCK_SIZE;
1578
1579        return 0;
1580}
1581
1582static int safexcel_sha384_digest(struct ahash_request *areq)
1583{
1584        int ret = safexcel_sha384_init(areq);
1585
1586        if (ret)
1587                return ret;
1588
1589        return safexcel_ahash_finup(areq);
1590}
1591
1592struct safexcel_alg_template safexcel_alg_sha384 = {
1593        .type = SAFEXCEL_ALG_TYPE_AHASH,
1594        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1595        .alg.ahash = {
1596                .init = safexcel_sha384_init,
1597                .update = safexcel_ahash_update,
1598                .final = safexcel_ahash_final,
1599                .finup = safexcel_ahash_finup,
1600                .digest = safexcel_sha384_digest,
1601                .export = safexcel_ahash_export,
1602                .import = safexcel_ahash_import,
1603                .halg = {
1604                        .digestsize = SHA384_DIGEST_SIZE,
1605                        .statesize = sizeof(struct safexcel_ahash_export_state),
1606                        .base = {
1607                                .cra_name = "sha384",
1608                                .cra_driver_name = "safexcel-sha384",
1609                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1610                                .cra_flags = CRYPTO_ALG_ASYNC |
1611                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1612                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1613                                .cra_blocksize = SHA384_BLOCK_SIZE,
1614                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1615                                .cra_init = safexcel_ahash_cra_init,
1616                                .cra_exit = safexcel_ahash_cra_exit,
1617                                .cra_module = THIS_MODULE,
1618                        },
1619                },
1620        },
1621};
1622
1623static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1624                                       unsigned int keylen)
1625{
1626        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1627                                        SHA512_DIGEST_SIZE);
1628}
1629
1630static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1631{
1632        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1633        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1634
1635        memset(req, 0, sizeof(*req));
1636
1637        /* Start from ipad precompute */
1638        memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1639        /* Already processed the key^ipad part now! */
1640        req->len        = SHA512_BLOCK_SIZE;
1641        req->processed  = SHA512_BLOCK_SIZE;
1642
1643        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1644        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1645        req->state_sz = SHA512_DIGEST_SIZE;
1646        req->digest_sz = SHA512_DIGEST_SIZE;
1647        req->block_sz = SHA512_BLOCK_SIZE;
1648        req->hmac = true;
1649
1650        return 0;
1651}
1652
1653static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1654{
1655        int ret = safexcel_hmac_sha512_init(areq);
1656
1657        if (ret)
1658                return ret;
1659
1660        return safexcel_ahash_finup(areq);
1661}
1662
1663struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1664        .type = SAFEXCEL_ALG_TYPE_AHASH,
1665        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1666        .alg.ahash = {
1667                .init = safexcel_hmac_sha512_init,
1668                .update = safexcel_ahash_update,
1669                .final = safexcel_ahash_final,
1670                .finup = safexcel_ahash_finup,
1671                .digest = safexcel_hmac_sha512_digest,
1672                .setkey = safexcel_hmac_sha512_setkey,
1673                .export = safexcel_ahash_export,
1674                .import = safexcel_ahash_import,
1675                .halg = {
1676                        .digestsize = SHA512_DIGEST_SIZE,
1677                        .statesize = sizeof(struct safexcel_ahash_export_state),
1678                        .base = {
1679                                .cra_name = "hmac(sha512)",
1680                                .cra_driver_name = "safexcel-hmac-sha512",
1681                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1682                                .cra_flags = CRYPTO_ALG_ASYNC |
1683                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1684                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1685                                .cra_blocksize = SHA512_BLOCK_SIZE,
1686                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1687                                .cra_init = safexcel_ahash_cra_init,
1688                                .cra_exit = safexcel_ahash_cra_exit,
1689                                .cra_module = THIS_MODULE,
1690                        },
1691                },
1692        },
1693};
1694
1695static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1696                                       unsigned int keylen)
1697{
1698        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1699                                        SHA512_DIGEST_SIZE);
1700}
1701
1702static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1703{
1704        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1705        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1706
1707        memset(req, 0, sizeof(*req));
1708
1709        /* Start from ipad precompute */
1710        memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1711        /* Already processed the key^ipad part now! */
1712        req->len        = SHA512_BLOCK_SIZE;
1713        req->processed  = SHA512_BLOCK_SIZE;
1714
1715        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1716        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1717        req->state_sz = SHA512_DIGEST_SIZE;
1718        req->digest_sz = SHA512_DIGEST_SIZE;
1719        req->block_sz = SHA512_BLOCK_SIZE;
1720        req->hmac = true;
1721
1722        return 0;
1723}
1724
1725static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1726{
1727        int ret = safexcel_hmac_sha384_init(areq);
1728
1729        if (ret)
1730                return ret;
1731
1732        return safexcel_ahash_finup(areq);
1733}
1734
1735struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1736        .type = SAFEXCEL_ALG_TYPE_AHASH,
1737        .algo_mask = SAFEXCEL_ALG_SHA2_512,
1738        .alg.ahash = {
1739                .init = safexcel_hmac_sha384_init,
1740                .update = safexcel_ahash_update,
1741                .final = safexcel_ahash_final,
1742                .finup = safexcel_ahash_finup,
1743                .digest = safexcel_hmac_sha384_digest,
1744                .setkey = safexcel_hmac_sha384_setkey,
1745                .export = safexcel_ahash_export,
1746                .import = safexcel_ahash_import,
1747                .halg = {
1748                        .digestsize = SHA384_DIGEST_SIZE,
1749                        .statesize = sizeof(struct safexcel_ahash_export_state),
1750                        .base = {
1751                                .cra_name = "hmac(sha384)",
1752                                .cra_driver_name = "safexcel-hmac-sha384",
1753                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1754                                .cra_flags = CRYPTO_ALG_ASYNC |
1755                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1756                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1757                                .cra_blocksize = SHA384_BLOCK_SIZE,
1758                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1759                                .cra_init = safexcel_ahash_cra_init,
1760                                .cra_exit = safexcel_ahash_cra_exit,
1761                                .cra_module = THIS_MODULE,
1762                        },
1763                },
1764        },
1765};
1766
1767static int safexcel_md5_init(struct ahash_request *areq)
1768{
1769        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1770        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1771
1772        memset(req, 0, sizeof(*req));
1773
1774        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1775        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1776        req->state_sz = MD5_DIGEST_SIZE;
1777        req->digest_sz = MD5_DIGEST_SIZE;
1778        req->block_sz = MD5_HMAC_BLOCK_SIZE;
1779
1780        return 0;
1781}
1782
1783static int safexcel_md5_digest(struct ahash_request *areq)
1784{
1785        int ret = safexcel_md5_init(areq);
1786
1787        if (ret)
1788                return ret;
1789
1790        return safexcel_ahash_finup(areq);
1791}
1792
1793struct safexcel_alg_template safexcel_alg_md5 = {
1794        .type = SAFEXCEL_ALG_TYPE_AHASH,
1795        .algo_mask = SAFEXCEL_ALG_MD5,
1796        .alg.ahash = {
1797                .init = safexcel_md5_init,
1798                .update = safexcel_ahash_update,
1799                .final = safexcel_ahash_final,
1800                .finup = safexcel_ahash_finup,
1801                .digest = safexcel_md5_digest,
1802                .export = safexcel_ahash_export,
1803                .import = safexcel_ahash_import,
1804                .halg = {
1805                        .digestsize = MD5_DIGEST_SIZE,
1806                        .statesize = sizeof(struct safexcel_ahash_export_state),
1807                        .base = {
1808                                .cra_name = "md5",
1809                                .cra_driver_name = "safexcel-md5",
1810                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1811                                .cra_flags = CRYPTO_ALG_ASYNC |
1812                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1813                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1814                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1815                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1816                                .cra_init = safexcel_ahash_cra_init,
1817                                .cra_exit = safexcel_ahash_cra_exit,
1818                                .cra_module = THIS_MODULE,
1819                        },
1820                },
1821        },
1822};
1823
1824static int safexcel_hmac_md5_init(struct ahash_request *areq)
1825{
1826        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1827        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1828
1829        memset(req, 0, sizeof(*req));
1830
1831        /* Start from ipad precompute */
1832        memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
1833        /* Already processed the key^ipad part now! */
1834        req->len        = MD5_HMAC_BLOCK_SIZE;
1835        req->processed  = MD5_HMAC_BLOCK_SIZE;
1836
1837        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1838        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1839        req->state_sz = MD5_DIGEST_SIZE;
1840        req->digest_sz = MD5_DIGEST_SIZE;
1841        req->block_sz = MD5_HMAC_BLOCK_SIZE;
1842        req->len_is_le = true; /* MD5 is little endian! ... */
1843        req->hmac = true;
1844
1845        return 0;
1846}
1847
1848static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1849                                     unsigned int keylen)
1850{
1851        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1852                                        MD5_DIGEST_SIZE);
1853}
1854
1855static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1856{
1857        int ret = safexcel_hmac_md5_init(areq);
1858
1859        if (ret)
1860                return ret;
1861
1862        return safexcel_ahash_finup(areq);
1863}
1864
1865struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1866        .type = SAFEXCEL_ALG_TYPE_AHASH,
1867        .algo_mask = SAFEXCEL_ALG_MD5,
1868        .alg.ahash = {
1869                .init = safexcel_hmac_md5_init,
1870                .update = safexcel_ahash_update,
1871                .final = safexcel_ahash_final,
1872                .finup = safexcel_ahash_finup,
1873                .digest = safexcel_hmac_md5_digest,
1874                .setkey = safexcel_hmac_md5_setkey,
1875                .export = safexcel_ahash_export,
1876                .import = safexcel_ahash_import,
1877                .halg = {
1878                        .digestsize = MD5_DIGEST_SIZE,
1879                        .statesize = sizeof(struct safexcel_ahash_export_state),
1880                        .base = {
1881                                .cra_name = "hmac(md5)",
1882                                .cra_driver_name = "safexcel-hmac-md5",
1883                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1884                                .cra_flags = CRYPTO_ALG_ASYNC |
1885                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1886                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1887                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1888                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1889                                .cra_init = safexcel_ahash_cra_init,
1890                                .cra_exit = safexcel_ahash_cra_exit,
1891                                .cra_module = THIS_MODULE,
1892                        },
1893                },
1894        },
1895};
1896
1897static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1898{
1899        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1900        int ret = safexcel_ahash_cra_init(tfm);
1901
1902        /* Default 'key' is all zeroes */
1903        memset(ctx->ipad, 0, sizeof(u32));
1904        return ret;
1905}
1906
1907static int safexcel_crc32_init(struct ahash_request *areq)
1908{
1909        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1910        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1911
1912        memset(req, 0, sizeof(*req));
1913
1914        /* Start from the loaded key, inverted to form the initial CRC state */
1915        req->state[0]   = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
1916        /* Set processed to non-zero to enable invalidation detection */
1917        req->len        = sizeof(u32);
1918        req->processed  = sizeof(u32);
1919
1920        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1921        req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1922        req->state_sz = sizeof(u32);
1923        req->digest_sz = sizeof(u32);
1924        req->block_sz = sizeof(u32);
1925
1926        return 0;
1927}
1928
1929static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1930                                 unsigned int keylen)
1931{
1932        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1933
1934        if (keylen != sizeof(u32))
1935                return -EINVAL;
1936
1937        memcpy(ctx->ipad, key, sizeof(u32));
1938        return 0;
1939}
1940
1941static int safexcel_crc32_digest(struct ahash_request *areq)
1942{
1943        return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1944}
1945
1946struct safexcel_alg_template safexcel_alg_crc32 = {
1947        .type = SAFEXCEL_ALG_TYPE_AHASH,
1948        .algo_mask = 0,
1949        .alg.ahash = {
1950                .init = safexcel_crc32_init,
1951                .update = safexcel_ahash_update,
1952                .final = safexcel_ahash_final,
1953                .finup = safexcel_ahash_finup,
1954                .digest = safexcel_crc32_digest,
1955                .setkey = safexcel_crc32_setkey,
1956                .export = safexcel_ahash_export,
1957                .import = safexcel_ahash_import,
1958                .halg = {
1959                        .digestsize = sizeof(u32),
1960                        .statesize = sizeof(struct safexcel_ahash_export_state),
1961                        .base = {
1962                                .cra_name = "crc32",
1963                                .cra_driver_name = "safexcel-crc32",
1964                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
1965                                .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1966                                             CRYPTO_ALG_ASYNC |
1967                                             CRYPTO_ALG_ALLOCATES_MEMORY |
1968                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
1969                                .cra_blocksize = 1,
1970                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1971                                .cra_init = safexcel_crc32_cra_init,
1972                                .cra_exit = safexcel_ahash_cra_exit,
1973                                .cra_module = THIS_MODULE,
1974                        },
1975                },
1976        },
1977};
1978
1979static int safexcel_cbcmac_init(struct ahash_request *areq)
1980{
1981        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1982        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1983
1984        memset(req, 0, sizeof(*req));
1985
1986        /* Start from loaded keys */
1987        memcpy(req->state, ctx->ipad, ctx->key_sz);
1988        /* Set processed to non-zero to enable invalidation detection */
1989        req->len        = AES_BLOCK_SIZE;
1990        req->processed  = AES_BLOCK_SIZE;
1991
1992        req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
1993        req->state_sz = ctx->key_sz;
1994        req->digest_sz = AES_BLOCK_SIZE;
1995        req->block_sz = AES_BLOCK_SIZE;
1996        req->xcbcmac  = true;
1997
1998        return 0;
1999}
2000
2001static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2002                                 unsigned int len)
2003{
2004        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2005        struct crypto_aes_ctx aes;
2006        int ret, i;
2007
2008        ret = aes_expandkey(&aes, key, len);
2009        if (ret)
2010                return ret;
2011
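        /*
         * Context layout, mirroring the XCBC/CMAC paths below: two AES
         * blocks of subkey material followed by the AES key itself. Plain
         * CBC-MAC has no subkeys, so leave those blocks zero and place the
         * key at 32-bit word offset 8; e.g. a 128-bit key yields
         * key_sz = 16 + 2 * 16 = 48 bytes of context.
         */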
2012        memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
2013        for (i = 0; i < len / sizeof(u32); i++)
2014                ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
2015
2016        if (len == AES_KEYSIZE_192) {
2017                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2018                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2019        } else if (len == AES_KEYSIZE_256) {
2020                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2021                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2022        } else {
2023                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2024                ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2025        }
2026        ctx->cbcmac  = true;
2027
2028        memzero_explicit(&aes, sizeof(aes));
2029        return 0;
2030}
2031
2032static int safexcel_cbcmac_digest(struct ahash_request *areq)
2033{
2034        return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2035}
2036
2037struct safexcel_alg_template safexcel_alg_cbcmac = {
2038        .type = SAFEXCEL_ALG_TYPE_AHASH,
2039        .algo_mask = 0,
2040        .alg.ahash = {
2041                .init = safexcel_cbcmac_init,
2042                .update = safexcel_ahash_update,
2043                .final = safexcel_ahash_final,
2044                .finup = safexcel_ahash_finup,
2045                .digest = safexcel_cbcmac_digest,
2046                .setkey = safexcel_cbcmac_setkey,
2047                .export = safexcel_ahash_export,
2048                .import = safexcel_ahash_import,
2049                .halg = {
2050                        .digestsize = AES_BLOCK_SIZE,
2051                        .statesize = sizeof(struct safexcel_ahash_export_state),
2052                        .base = {
2053                                .cra_name = "cbcmac(aes)",
2054                                .cra_driver_name = "safexcel-cbcmac-aes",
2055                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2056                                .cra_flags = CRYPTO_ALG_ASYNC |
2057                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2058                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2059                                .cra_blocksize = 1,
2060                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2061                                .cra_init = safexcel_ahash_cra_init,
2062                                .cra_exit = safexcel_ahash_cra_exit,
2063                                .cra_module = THIS_MODULE,
2064                        },
2065                },
2066        },
2067};
2068
2069static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2070                                 unsigned int len)
2071{
2072        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2073        struct crypto_aes_ctx aes;
2074        u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2075        int ret, i;
2076
2077        ret = aes_expandkey(&aes, key, len);
2078        if (ret)
2079                return ret;
2080
2081        /* precompute the XCBC key material */
2082        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2083        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2084                                CRYPTO_TFM_REQ_MASK);
2085        ret = crypto_cipher_setkey(ctx->kaes, key, len);
2086        if (ret)
2087                return ret;
2088
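        /*
         * RFC 3566 key derivation: K1 = E_K(0x01..01), K2 = E_K(0x02..02),
         * K3 = E_K(0x03..03). key_tmp ends up as K2 | K3 | K1; the whole
         * buffer is copied into the context below and K1 is additionally
         * installed as the key of the local AES cipher.
         */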
2089        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2090                "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2091        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2092                "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2093        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2094                "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2095        for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2096                ctx->ipad[i] =
2097                        cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
2098
2099        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2100        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2101                                CRYPTO_TFM_REQ_MASK);
2102        ret = crypto_cipher_setkey(ctx->kaes,
2103                                   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2104                                   AES_MIN_KEY_SIZE);
2105        if (ret)
2106                return ret;
2107
2108        ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2109        ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2110        ctx->cbcmac = false;
2111
2112        memzero_explicit(&aes, sizeof(aes));
2113        return 0;
2114}
2115
2116static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2117{
2118        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2119
2120        safexcel_ahash_cra_init(tfm);
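        /* Bare AES cipher, used by setkey() to derive the XCBC/CMAC subkeys */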
2121        ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2122        return PTR_ERR_OR_ZERO(ctx->kaes);
2123}
2124
2125static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2126{
2127        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2128
2129        crypto_free_cipher(ctx->kaes);
2130        safexcel_ahash_cra_exit(tfm);
2131}
2132
2133struct safexcel_alg_template safexcel_alg_xcbcmac = {
2134        .type = SAFEXCEL_ALG_TYPE_AHASH,
2135        .algo_mask = 0,
2136        .alg.ahash = {
2137                .init = safexcel_cbcmac_init,
2138                .update = safexcel_ahash_update,
2139                .final = safexcel_ahash_final,
2140                .finup = safexcel_ahash_finup,
2141                .digest = safexcel_cbcmac_digest,
2142                .setkey = safexcel_xcbcmac_setkey,
2143                .export = safexcel_ahash_export,
2144                .import = safexcel_ahash_import,
2145                .halg = {
2146                        .digestsize = AES_BLOCK_SIZE,
2147                        .statesize = sizeof(struct safexcel_ahash_export_state),
2148                        .base = {
2149                                .cra_name = "xcbc(aes)",
2150                                .cra_driver_name = "safexcel-xcbc-aes",
2151                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2152                                .cra_flags = CRYPTO_ALG_ASYNC |
2153                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2154                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2155                                .cra_blocksize = AES_BLOCK_SIZE,
2156                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2157                                .cra_init = safexcel_xcbcmac_cra_init,
2158                                .cra_exit = safexcel_xcbcmac_cra_exit,
2159                                .cra_module = THIS_MODULE,
2160                        },
2161                },
2162        },
2163};
2164
2165static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2166                                unsigned int len)
2167{
2168        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2169        struct crypto_aes_ctx aes;
2170        __be64 consts[4];
2171        u64 _const[2];
2172        u8 msb_mask, gfmask;
2173        int ret, i;
2174
2175        ret = aes_expandkey(&aes, key, len);
2176        if (ret)
2177                return ret;
2178
2179        for (i = 0; i < len / sizeof(u32); i++)
2180                ctx->ipad[i + 8] =
2181                        cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
2182
2183        /* precompute the CMAC key material */
2184        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2185        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2186                                CRYPTO_TFM_REQ_MASK);
2187        ret = crypto_cipher_setkey(ctx->kaes, key, len);
2188        if (ret)
2189                return ret;
2190
2191        /* code below borrowed from crypto/cmac.c */
2192        /* encrypt the zero block */
2193        memset(consts, 0, AES_BLOCK_SIZE);
2194        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2195
2196        gfmask = 0x87;
2197        _const[0] = be64_to_cpu(consts[1]);
2198        _const[1] = be64_to_cpu(consts[0]);
2199
2200        /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2201        for (i = 0; i < 4; i += 2) {
2202                msb_mask = ((s64)_const[1] >> 63) & gfmask;
2203                _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2204                _const[0] = (_const[0] << 1) ^ msb_mask;
2205
2206                consts[i + 0] = cpu_to_be64(_const[1]);
2207                consts[i + 1] = cpu_to_be64(_const[0]);
2208        }
2209        /* end of code borrowed from crypto/cmac.c */
2210
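        /* consts[] now holds subkeys K1 and K2; load them into the context */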
2211        for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2212                ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
2213
2214        if (len == AES_KEYSIZE_192) {
2215                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2216                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2217        } else if (len == AES_KEYSIZE_256) {
2218                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2219                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2220        } else {
2221                ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2222                ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2223        }
2224        ctx->cbcmac = false;
2225
2226        memzero_explicit(&aes, sizeof(aes));
2227        return 0;
2228}
2229
2230struct safexcel_alg_template safexcel_alg_cmac = {
2231        .type = SAFEXCEL_ALG_TYPE_AHASH,
2232        .algo_mask = 0,
2233        .alg.ahash = {
2234                .init = safexcel_cbcmac_init,
2235                .update = safexcel_ahash_update,
2236                .final = safexcel_ahash_final,
2237                .finup = safexcel_ahash_finup,
2238                .digest = safexcel_cbcmac_digest,
2239                .setkey = safexcel_cmac_setkey,
2240                .export = safexcel_ahash_export,
2241                .import = safexcel_ahash_import,
2242                .halg = {
2243                        .digestsize = AES_BLOCK_SIZE,
2244                        .statesize = sizeof(struct safexcel_ahash_export_state),
2245                        .base = {
2246                                .cra_name = "cmac(aes)",
2247                                .cra_driver_name = "safexcel-cmac-aes",
2248                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2249                                .cra_flags = CRYPTO_ALG_ASYNC |
2250                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2251                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2252                                .cra_blocksize = AES_BLOCK_SIZE,
2253                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2254                                .cra_init = safexcel_xcbcmac_cra_init,
2255                                .cra_exit = safexcel_xcbcmac_cra_exit,
2256                                .cra_module = THIS_MODULE,
2257                        },
2258                },
2259        },
2260};
2261
2262static int safexcel_sm3_init(struct ahash_request *areq)
2263{
2264        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2265        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2266
2267        memset(req, 0, sizeof(*req));
2268
2269        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2270        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2271        req->state_sz = SM3_DIGEST_SIZE;
2272        req->digest_sz = SM3_DIGEST_SIZE;
2273        req->block_sz = SM3_BLOCK_SIZE;
2274
2275        return 0;
2276}
2277
2278static int safexcel_sm3_digest(struct ahash_request *areq)
2279{
2280        int ret = safexcel_sm3_init(areq);
2281
2282        if (ret)
2283                return ret;
2284
2285        return safexcel_ahash_finup(areq);
2286}
2287
2288struct safexcel_alg_template safexcel_alg_sm3 = {
2289        .type = SAFEXCEL_ALG_TYPE_AHASH,
2290        .algo_mask = SAFEXCEL_ALG_SM3,
2291        .alg.ahash = {
2292                .init = safexcel_sm3_init,
2293                .update = safexcel_ahash_update,
2294                .final = safexcel_ahash_final,
2295                .finup = safexcel_ahash_finup,
2296                .digest = safexcel_sm3_digest,
2297                .export = safexcel_ahash_export,
2298                .import = safexcel_ahash_import,
2299                .halg = {
2300                        .digestsize = SM3_DIGEST_SIZE,
2301                        .statesize = sizeof(struct safexcel_ahash_export_state),
2302                        .base = {
2303                                .cra_name = "sm3",
2304                                .cra_driver_name = "safexcel-sm3",
2305                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2306                                .cra_flags = CRYPTO_ALG_ASYNC |
2307                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2308                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2309                                .cra_blocksize = SM3_BLOCK_SIZE,
2310                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2311                                .cra_init = safexcel_ahash_cra_init,
2312                                .cra_exit = safexcel_ahash_cra_exit,
2313                                .cra_module = THIS_MODULE,
2314                        },
2315                },
2316        },
2317};
2318
2319static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2320                                    unsigned int keylen)
2321{
2322        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2323                                        SM3_DIGEST_SIZE);
2324}
2325
2326static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2327{
2328        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2329        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2330
2331        memset(req, 0, sizeof(*req));
2332
2333        /* Start from ipad precompute */
2334        memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
2335        /* Already processed the key^ipad part now! */
2336        req->len        = SM3_BLOCK_SIZE;
2337        req->processed  = SM3_BLOCK_SIZE;
2338
2339        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2340        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2341        req->state_sz = SM3_DIGEST_SIZE;
2342        req->digest_sz = SM3_DIGEST_SIZE;
2343        req->block_sz = SM3_BLOCK_SIZE;
2344        req->hmac = true;
2345
2346        return 0;
2347}
2348
2349static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2350{
2351        int ret = safexcel_hmac_sm3_init(areq);
2352
2353        if (ret)
2354                return ret;
2355
2356        return safexcel_ahash_finup(areq);
2357}
2358
2359struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2360        .type = SAFEXCEL_ALG_TYPE_AHASH,
2361        .algo_mask = SAFEXCEL_ALG_SM3,
2362        .alg.ahash = {
2363                .init = safexcel_hmac_sm3_init,
2364                .update = safexcel_ahash_update,
2365                .final = safexcel_ahash_final,
2366                .finup = safexcel_ahash_finup,
2367                .digest = safexcel_hmac_sm3_digest,
2368                .setkey = safexcel_hmac_sm3_setkey,
2369                .export = safexcel_ahash_export,
2370                .import = safexcel_ahash_import,
2371                .halg = {
2372                        .digestsize = SM3_DIGEST_SIZE,
2373                        .statesize = sizeof(struct safexcel_ahash_export_state),
2374                        .base = {
2375                                .cra_name = "hmac(sm3)",
2376                                .cra_driver_name = "safexcel-hmac-sm3",
2377                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2378                                .cra_flags = CRYPTO_ALG_ASYNC |
2379                                             CRYPTO_ALG_ALLOCATES_MEMORY |
2380                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
2381                                .cra_blocksize = SM3_BLOCK_SIZE,
2382                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2383                                .cra_init = safexcel_ahash_cra_init,
2384                                .cra_exit = safexcel_ahash_cra_exit,
2385                                .cra_module = THIS_MODULE,
2386                        },
2387                },
2388        },
2389};
2390
2391static int safexcel_sha3_224_init(struct ahash_request *areq)
2392{
2393        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2394        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2395        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2396
2397        memset(req, 0, sizeof(*req));
2398
2399        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
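        /* No precomputed IV: the engine starts from its initial state */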
2400        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2401        req->state_sz = SHA3_224_DIGEST_SIZE;
2402        req->digest_sz = SHA3_224_DIGEST_SIZE;
2403        req->block_sz = SHA3_224_BLOCK_SIZE;
2404        ctx->do_fallback = false;
2405        ctx->fb_init_done = false;
2406        return 0;
2407}
2408
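/*
 * Lazily prepare the software fallback: mirror the caller's request into the
 * fallback subrequest and, on first use, program the HMAC key (if any) and
 * init the fallback hash state.
 */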
2409static int safexcel_sha3_fbcheck(struct ahash_request *req)
2410{
2411        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2412        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2413        struct ahash_request *subreq = ahash_request_ctx(req);
2414        int ret = 0;
2415
2416        if (ctx->do_fallback) {
2417                ahash_request_set_tfm(subreq, ctx->fback);
2418                ahash_request_set_callback(subreq, req->base.flags,
2419                                           req->base.complete, req->base.data);
2420                ahash_request_set_crypt(subreq, req->src, req->result,
2421                                        req->nbytes);
2422                if (!ctx->fb_init_done) {
2423                        if (ctx->fb_do_setkey) {
2424                                /* Set the fallback ahash's HMAC key */
2425                                u8 key[SHA3_224_BLOCK_SIZE];
2426
2427                                memcpy(key, ctx->ipad,
2428                                       crypto_ahash_blocksize(ctx->fback) / 2);
2429                                memcpy(key +
2430                                       crypto_ahash_blocksize(ctx->fback) / 2,
2431                                       ctx->opad,
2432                                       crypto_ahash_blocksize(ctx->fback) / 2);
2433                                ret = crypto_ahash_setkey(ctx->fback, key,
2434                                        crypto_ahash_blocksize(ctx->fback));
2435                                memzero_explicit(key,
2436                                        crypto_ahash_blocksize(ctx->fback));
2437                                ctx->fb_do_setkey = false;
2438                        }
2439                        ret = ret ?: crypto_ahash_init(subreq);
2440                        ctx->fb_init_done = true;
2441                }
2442        }
2443        return ret;
2444}
2445
2446static int safexcel_sha3_update(struct ahash_request *req)
2447{
2448        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2449        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2450        struct ahash_request *subreq = ahash_request_ctx(req);
2451
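        /* Only one-shot SHA3 is done in HW; route any update to the fallback */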
2452        ctx->do_fallback = true;
2453        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2454}
2455
2456static int safexcel_sha3_final(struct ahash_request *req)
2457{
2458        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2459        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2460        struct ahash_request *subreq = ahash_request_ctx(req);
2461
2462        ctx->do_fallback = true;
2463        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2464}
2465
2466static int safexcel_sha3_finup(struct ahash_request *req)
2467{
2468        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2469        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2470        struct ahash_request *subreq = ahash_request_ctx(req);
2471
2472        ctx->do_fallback |= !req->nbytes;
2473        if (ctx->do_fallback)
2474                /* Update/export/import happened or len 0, cannot use the HW */
2475                return safexcel_sha3_fbcheck(req) ?:
2476                       crypto_ahash_finup(subreq);
2477        else
2478                return safexcel_ahash_finup(req);
2479}
2480
2481static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2482{
2483        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2484        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2485        struct ahash_request *subreq = ahash_request_ctx(req);
2486
2487        ctx->do_fallback = true;
2488        ctx->fb_init_done = false;
2489        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2490}
2491
2492static int safexcel_sha3_224_digest(struct ahash_request *req)
2493{
2494        if (req->nbytes)
2495                return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2496
2497        /* HW cannot do zero length hash, use fallback instead */
2498        return safexcel_sha3_digest_fallback(req);
2499}
2500
2501static int safexcel_sha3_export(struct ahash_request *req, void *out)
2502{
2503        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2504        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2505        struct ahash_request *subreq = ahash_request_ctx(req);
2506
2507        ctx->do_fallback = true;
2508        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2509}
2510
2511static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2512{
2513        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2514        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2515        struct ahash_request *subreq = ahash_request_ctx(req);
2516
2517        ctx->do_fallback = true;
2518        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2520}
2521
2522static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2523{
2524        struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2525        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2526
2527        safexcel_ahash_cra_init(tfm);
2528
2529        /* Allocate fallback implementation */
2530        ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2531                                        CRYPTO_ALG_ASYNC |
2532                                        CRYPTO_ALG_NEED_FALLBACK);
2533        if (IS_ERR(ctx->fback))
2534                return PTR_ERR(ctx->fback);
2535
2536        /* Update statesize from fallback algorithm! */
2537        crypto_hash_alg_common(ahash)->statesize =
2538                crypto_ahash_statesize(ctx->fback);
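        /* Request ctx must fit either our own state or a full fallback request */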
2539        crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2540                                            sizeof(struct ahash_request) +
2541                                            crypto_ahash_reqsize(ctx->fback)));
2542        return 0;
2543}
2544
2545static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2546{
2547        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2548
2549        crypto_free_ahash(ctx->fback);
2550        safexcel_ahash_cra_exit(tfm);
2551}
2552
2553struct safexcel_alg_template safexcel_alg_sha3_224 = {
2554        .type = SAFEXCEL_ALG_TYPE_AHASH,
2555        .algo_mask = SAFEXCEL_ALG_SHA3,
2556        .alg.ahash = {
2557                .init = safexcel_sha3_224_init,
2558                .update = safexcel_sha3_update,
2559                .final = safexcel_sha3_final,
2560                .finup = safexcel_sha3_finup,
2561                .digest = safexcel_sha3_224_digest,
2562                .export = safexcel_sha3_export,
2563                .import = safexcel_sha3_import,
2564                .halg = {
2565                        .digestsize = SHA3_224_DIGEST_SIZE,
2566                        .statesize = sizeof(struct safexcel_ahash_export_state),
2567                        .base = {
2568                                .cra_name = "sha3-224",
2569                                .cra_driver_name = "safexcel-sha3-224",
2570                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2571                                .cra_flags = CRYPTO_ALG_ASYNC |
2572                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2573                                             CRYPTO_ALG_NEED_FALLBACK,
2574                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
2575                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2576                                .cra_init = safexcel_sha3_cra_init,
2577                                .cra_exit = safexcel_sha3_cra_exit,
2578                                .cra_module = THIS_MODULE,
2579                        },
2580                },
2581        },
2582};
2583
2584static int safexcel_sha3_256_init(struct ahash_request *areq)
2585{
2586        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2587        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2588        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2589
2590        memset(req, 0, sizeof(*req));
2591
2592        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2593        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2594        req->state_sz = SHA3_256_DIGEST_SIZE;
2595        req->digest_sz = SHA3_256_DIGEST_SIZE;
2596        req->block_sz = SHA3_256_BLOCK_SIZE;
2597        ctx->do_fallback = false;
2598        ctx->fb_init_done = false;
2599        return 0;
2600}
2601
2602static int safexcel_sha3_256_digest(struct ahash_request *req)
2603{
2604        if (req->nbytes)
2605                return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2606
2607        /* HW cannot do zero length hash, use fallback instead */
2608        return safexcel_sha3_digest_fallback(req);
2609}
2610
2611struct safexcel_alg_template safexcel_alg_sha3_256 = {
2612        .type = SAFEXCEL_ALG_TYPE_AHASH,
2613        .algo_mask = SAFEXCEL_ALG_SHA3,
2614        .alg.ahash = {
2615                .init = safexcel_sha3_256_init,
2616                .update = safexcel_sha3_update,
2617                .final = safexcel_sha3_final,
2618                .finup = safexcel_sha3_finup,
2619                .digest = safexcel_sha3_256_digest,
2620                .export = safexcel_sha3_export,
2621                .import = safexcel_sha3_import,
2622                .halg = {
2623                        .digestsize = SHA3_256_DIGEST_SIZE,
2624                        .statesize = sizeof(struct safexcel_ahash_export_state),
2625                        .base = {
2626                                .cra_name = "sha3-256",
2627                                .cra_driver_name = "safexcel-sha3-256",
2628                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2629                                .cra_flags = CRYPTO_ALG_ASYNC |
2630                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2631                                             CRYPTO_ALG_NEED_FALLBACK,
2632                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
2633                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2634                                .cra_init = safexcel_sha3_cra_init,
2635                                .cra_exit = safexcel_sha3_cra_exit,
2636                                .cra_module = THIS_MODULE,
2637                        },
2638                },
2639        },
2640};
2641
2642static int safexcel_sha3_384_init(struct ahash_request *areq)
2643{
2644        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2645        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2646        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2647
2648        memset(req, 0, sizeof(*req));
2649
2650        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2651        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2652        req->state_sz = SHA3_384_DIGEST_SIZE;
2653        req->digest_sz = SHA3_384_DIGEST_SIZE;
2654        req->block_sz = SHA3_384_BLOCK_SIZE;
2655        ctx->do_fallback = false;
2656        ctx->fb_init_done = false;
2657        return 0;
2658}
2659
2660static int safexcel_sha3_384_digest(struct ahash_request *req)
2661{
2662        if (req->nbytes)
2663                return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2664
2665        /* HW cannot do zero length hash, use fallback instead */
2666        return safexcel_sha3_digest_fallback(req);
2667}
2668
2669struct safexcel_alg_template safexcel_alg_sha3_384 = {
2670        .type = SAFEXCEL_ALG_TYPE_AHASH,
2671        .algo_mask = SAFEXCEL_ALG_SHA3,
2672        .alg.ahash = {
2673                .init = safexcel_sha3_384_init,
2674                .update = safexcel_sha3_update,
2675                .final = safexcel_sha3_final,
2676                .finup = safexcel_sha3_finup,
2677                .digest = safexcel_sha3_384_digest,
2678                .export = safexcel_sha3_export,
2679                .import = safexcel_sha3_import,
2680                .halg = {
2681                        .digestsize = SHA3_384_DIGEST_SIZE,
2682                        .statesize = sizeof(struct safexcel_ahash_export_state),
2683                        .base = {
2684                                .cra_name = "sha3-384",
2685                                .cra_driver_name = "safexcel-sha3-384",
2686                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2687                                .cra_flags = CRYPTO_ALG_ASYNC |
2688                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2689                                             CRYPTO_ALG_NEED_FALLBACK,
2690                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
2691                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2692                                .cra_init = safexcel_sha3_cra_init,
2693                                .cra_exit = safexcel_sha3_cra_exit,
2694                                .cra_module = THIS_MODULE,
2695                        },
2696                },
2697        },
2698};
2699
2700static int safexcel_sha3_512_init(struct ahash_request *areq)
2701{
2702        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2703        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2704        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2705
2706        memset(req, 0, sizeof(*req));
2707
2708        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2709        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2710        req->state_sz = SHA3_512_DIGEST_SIZE;
2711        req->digest_sz = SHA3_512_DIGEST_SIZE;
2712        req->block_sz = SHA3_512_BLOCK_SIZE;
2713        ctx->do_fallback = false;
2714        ctx->fb_init_done = false;
2715        return 0;
2716}
2717
2718static int safexcel_sha3_512_digest(struct ahash_request *req)
2719{
2720        if (req->nbytes)
2721                return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2722
2723        /* HW cannot do zero length hash, use fallback instead */
2724        return safexcel_sha3_digest_fallback(req);
2725}
2726
2727struct safexcel_alg_template safexcel_alg_sha3_512 = {
2728        .type = SAFEXCEL_ALG_TYPE_AHASH,
2729        .algo_mask = SAFEXCEL_ALG_SHA3,
2730        .alg.ahash = {
2731                .init = safexcel_sha3_512_init,
2732                .update = safexcel_sha3_update,
2733                .final = safexcel_sha3_final,
2734                .finup = safexcel_sha3_finup,
2735                .digest = safexcel_sha3_512_digest,
2736                .export = safexcel_sha3_export,
2737                .import = safexcel_sha3_import,
2738                .halg = {
2739                        .digestsize = SHA3_512_DIGEST_SIZE,
2740                        .statesize = sizeof(struct safexcel_ahash_export_state),
2741                        .base = {
2742                                .cra_name = "sha3-512",
2743                                .cra_driver_name = "safexcel-sha3-512",
2744                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2745                                .cra_flags = CRYPTO_ALG_ASYNC |
2746                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2747                                             CRYPTO_ALG_NEED_FALLBACK,
2748                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
2749                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2750                                .cra_init = safexcel_sha3_cra_init,
2751                                .cra_exit = safexcel_sha3_cra_exit,
2752                                .cra_module = THIS_MODULE,
2753                        },
2754                },
2755        },
2756};
2757
2758static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2759{
2760        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2761        int ret;
2762
2763        ret = safexcel_sha3_cra_init(tfm);
2764        if (ret)
2765                return ret;
2766
2767        /* Allocate precalc basic digest implementation */
2768        ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2769        if (IS_ERR(ctx->shpre))
2770                return PTR_ERR(ctx->shpre);
2771
2772        ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2773                              crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2774        if (!ctx->shdesc) {
2775                crypto_free_shash(ctx->shpre);
2776                return -ENOMEM;
2777        }
2778        ctx->shdesc->tfm = ctx->shpre;
2779        return 0;
2780}
2781
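/*
 * Aside (illustrative only): the kmalloc'd shdesc kept above is a
 * long-lived descriptor, sized exactly via crypto_shash_descsize() and
 * reused by setkey() for every key-preprocessing run. A one-shot user
 * could instead use the generic on-stack helper, which is sized for the
 * worst case; a minimal sketch, assuming <crypto/hash.h>:
 */
static int __maybe_unused sha3_precalc_digest_example(struct crypto_shash *shpre,
                                                      const u8 *key,
                                                      unsigned int keylen,
                                                      u8 *out)
{
        SHASH_DESC_ON_STACK(desc, shpre);

        desc->tfm = shpre;
        return crypto_shash_digest(desc, key, keylen, out);
}
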
2782static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2783{
2784        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2785
2786        crypto_free_ahash(ctx->fback);
2787        crypto_free_shash(ctx->shpre);
2788        kfree(ctx->shdesc);
2789        safexcel_ahash_cra_exit(tfm);
2790}
2791
2792static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2793                                     unsigned int keylen)
2794{
2795        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2796        int ret = 0;
2797
2798        if (keylen > crypto_ahash_blocksize(tfm)) {
2799                /*
2800                 * If the key is larger than the blocksize, then hash it
2801                 * first using our precalc fallback hash
2802                 */
2803                ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2804                                          (u8 *)ctx->ipad);
2805                keylen = crypto_shash_digestsize(ctx->shpre);
2806
2807                /*
2808                 * If the digest is larger than half the blocksize, we need to
2809                 * move the rest to opad due to the way our HMAC infra works.
2810                 */
2811                if (keylen > crypto_ahash_blocksize(tfm) / 2)
2812                        /* Buffers overlap, need to use memmove instead of memcpy! */
2813                        memmove(ctx->opad,
2814                                (u8 *)ctx->ipad +
2815                                        crypto_ahash_blocksize(tfm) / 2,
2816                                keylen - crypto_ahash_blocksize(tfm) / 2);
2817        } else {
2818                /*
2819                 * Copy the key to our ipad & opad buffers
2820                 * Note that ipad and opad each contain one half of the key,
2821                 * to match the existing HMAC driver infrastructure.
2822                 */
2823                if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2824                        memcpy(ctx->ipad, key, keylen);
2825                } else {
2826                        memcpy(ctx->ipad, key,
2827                               crypto_ahash_blocksize(tfm) / 2);
2828                        memcpy(ctx->opad,
2829                               key + crypto_ahash_blocksize(tfm) / 2,
2830                               keylen - crypto_ahash_blocksize(tfm) / 2);
2831                }
2832        }
2833
2834        /* Pad key with zeroes */
2835        if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2836                memset((u8 *)ctx->ipad + keylen, 0,
2837                       crypto_ahash_blocksize(tfm) / 2 - keylen);
2838                memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
2839        } else {
2840                memset((u8 *)ctx->opad + keylen -
2841                       crypto_ahash_blocksize(tfm) / 2, 0,
2842                       crypto_ahash_blocksize(tfm) - keylen);
2843        }
2844
2845        /* If doing fallback, still need to set the new key! */
2846        ctx->fb_do_setkey = true;
2847        return ret;
2848}
2849
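/*
 * Worked example of the key layout produced above, for hmac(sha3-256)
 * (block size 136 bytes, so ipad and opad each hold a 68 byte half):
 *
 *  -  32 byte key: key[0..31] -> ipad[0..31], ipad[32..67] zeroed,
 *                  opad half zeroed entirely.
 *  - 100 byte key: key[0..67] -> ipad, key[68..99] -> opad[0..31],
 *                  opad[32..67] zeroed.
 *  - 200 byte key: hashed down to a 32 byte sha3-256 digest first,
 *                  which then fits in the ipad half as in the first case.
 *
 * For hmac(sha3-512) (block 72, half 36) an oversized key hashes to a
 * 64 byte digest, so its tail (bytes 36..63) straddles both halves and
 * is memmove'd into opad before the zero padding is applied.
 */
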
2850static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2851{
2852        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2853        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2854        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2855
2856        memset(req, 0, sizeof(*req));
2857
2858        /* Copy (half of) the key */
2859        memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
2860        /* Start of HMAC should have len == processed == blocksize */
2861        req->len        = SHA3_224_BLOCK_SIZE;
2862        req->processed  = SHA3_224_BLOCK_SIZE;
2863        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2864        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2865        req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2866        req->digest_sz = SHA3_224_DIGEST_SIZE;
2867        req->block_sz = SHA3_224_BLOCK_SIZE;
2868        req->hmac = true;
2869        ctx->do_fallback = false;
2870        ctx->fb_init_done = false;
2871        return 0;
2872}
2873
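/*
 * Note on the HMAC init helpers (this one and the 256/384/512 variants
 * below): preloading (half of) the processed key into req->state while
 * claiming len == processed == one full block makes the request behave
 * as if the key block had already been hashed, so the engine continues
 * the keyed digest from the imported state rather than from scratch;
 * CONTEXT_CONTROL_DIGEST_HMAC then selects the engine's HMAC digest mode.
 */
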
2874static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2875{
2876        if (req->nbytes)
2877                return safexcel_hmac_sha3_224_init(req) ?:
2878                       safexcel_ahash_finup(req);
2879
2880        /* HW cannot do zero length HMAC, use fallback instead */
2881        return safexcel_sha3_digest_fallback(req);
2882}
2883
2884static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2885{
2886        return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2887}
2888
2889struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2890        .type = SAFEXCEL_ALG_TYPE_AHASH,
2891        .algo_mask = SAFEXCEL_ALG_SHA3,
2892        .alg.ahash = {
2893                .init = safexcel_hmac_sha3_224_init,
2894                .update = safexcel_sha3_update,
2895                .final = safexcel_sha3_final,
2896                .finup = safexcel_sha3_finup,
2897                .digest = safexcel_hmac_sha3_224_digest,
2898                .setkey = safexcel_hmac_sha3_setkey,
2899                .export = safexcel_sha3_export,
2900                .import = safexcel_sha3_import,
2901                .halg = {
2902                        .digestsize = SHA3_224_DIGEST_SIZE,
2903                        .statesize = sizeof(struct safexcel_ahash_export_state),
2904                        .base = {
2905                                .cra_name = "hmac(sha3-224)",
2906                                .cra_driver_name = "safexcel-hmac-sha3-224",
2907                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2908                                .cra_flags = CRYPTO_ALG_ASYNC |
2909                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2910                                             CRYPTO_ALG_NEED_FALLBACK,
2911                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
2912                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2913                                .cra_init = safexcel_hmac_sha3_224_cra_init,
2914                                .cra_exit = safexcel_hmac_sha3_cra_exit,
2915                                .cra_module = THIS_MODULE,
2916                        },
2917                },
2918        },
2919};
2920
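/*
 * Editor's illustration (not driver code): driving the keyed flavour
 * above from kernel code. hmac_sha3_224_example() is hypothetical and
 * error handling is abbreviated; assumptions as in the earlier sha3-256
 * sketch. crypto_ahash_setkey() runs the key preprocessing implemented
 * in safexcel_hmac_sha3_setkey() above.
 */
static int __maybe_unused hmac_sha3_224_example(const u8 *key, unsigned int klen,
                                                const u8 *msg, unsigned int mlen,
                                                u8 *mac /* 28 bytes */)
{
        struct crypto_ahash *tfm;
        struct ahash_request *req;
        struct scatterlist sg;
        DECLARE_CRYPTO_WAIT(wait);
        int ret;

        tfm = crypto_alloc_ahash("hmac(sha3-224)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_ahash_setkey(tfm, key, klen);
        if (ret)
                goto out_free_tfm;

        req = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!req) {
                ret = -ENOMEM;
                goto out_free_tfm;
        }

        sg_init_one(&sg, msg, mlen);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);
        ahash_request_set_crypt(req, &sg, mac, mlen);

        /* Zero-length messages go through the software fallback above */
        ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

        ahash_request_free(req);
out_free_tfm:
        crypto_free_ahash(tfm);
        return ret;
}
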
2921static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2922{
2923        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2924        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2925        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2926
2927        memset(req, 0, sizeof(*req));
2928
2929        /* Copy (half of) the key */
2930        memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
2931        /* Start of HMAC should have len == processed == blocksize */
2932        req->len        = SHA3_256_BLOCK_SIZE;
2933        req->processed  = SHA3_256_BLOCK_SIZE;
2934        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2935        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2936        req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2937        req->digest_sz = SHA3_256_DIGEST_SIZE;
2938        req->block_sz = SHA3_256_BLOCK_SIZE;
2939        req->hmac = true;
2940        ctx->do_fallback = false;
2941        ctx->fb_init_done = false;
2942        return 0;
2943}
2944
2945static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2946{
2947        if (req->nbytes)
2948                return safexcel_hmac_sha3_256_init(req) ?:
2949                       safexcel_ahash_finup(req);
2950
2951        /* HW cannot do zero length HMAC, use fallback instead */
2952        return safexcel_sha3_digest_fallback(req);
2953}
2954
2955static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2956{
2957        return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2958}
2959
2960struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2961        .type = SAFEXCEL_ALG_TYPE_AHASH,
2962        .algo_mask = SAFEXCEL_ALG_SHA3,
2963        .alg.ahash = {
2964                .init = safexcel_hmac_sha3_256_init,
2965                .update = safexcel_sha3_update,
2966                .final = safexcel_sha3_final,
2967                .finup = safexcel_sha3_finup,
2968                .digest = safexcel_hmac_sha3_256_digest,
2969                .setkey = safexcel_hmac_sha3_setkey,
2970                .export = safexcel_sha3_export,
2971                .import = safexcel_sha3_import,
2972                .halg = {
2973                        .digestsize = SHA3_256_DIGEST_SIZE,
2974                        .statesize = sizeof(struct safexcel_ahash_export_state),
2975                        .base = {
2976                                .cra_name = "hmac(sha3-256)",
2977                                .cra_driver_name = "safexcel-hmac-sha3-256",
2978                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
2979                                .cra_flags = CRYPTO_ALG_ASYNC |
2980                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
2981                                             CRYPTO_ALG_NEED_FALLBACK,
2982                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
2983                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2984                                .cra_init = safexcel_hmac_sha3_256_cra_init,
2985                                .cra_exit = safexcel_hmac_sha3_cra_exit,
2986                                .cra_module = THIS_MODULE,
2987                        },
2988                },
2989        },
2990};
2991
2992static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
2993{
2994        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2995        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2996        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2997
2998        memset(req, 0, sizeof(*req));
2999
3000        /* Copy (half of) the key */
3001        memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
3002        /* Start of HMAC should have len == processed == blocksize */
3003        req->len        = SHA3_384_BLOCK_SIZE;
3004        req->processed  = SHA3_384_BLOCK_SIZE;
3005        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
3006        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3007        req->state_sz = SHA3_384_BLOCK_SIZE / 2;
3008        req->digest_sz = SHA3_384_DIGEST_SIZE;
3009        req->block_sz = SHA3_384_BLOCK_SIZE;
3010        req->hmac = true;
3011        ctx->do_fallback = false;
3012        ctx->fb_init_done = false;
3013        return 0;
3014}
3015
3016static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3017{
3018        if (req->nbytes)
3019                return safexcel_hmac_sha3_384_init(req) ?:
3020                       safexcel_ahash_finup(req);
3021
3022        /* HW cannot do zero length HMAC, use fallback instead */
3023        return safexcel_sha3_digest_fallback(req);
3024}
3025
3026static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
3027{
3028        return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
3029}
3030
3031struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
3032        .type = SAFEXCEL_ALG_TYPE_AHASH,
3033        .algo_mask = SAFEXCEL_ALG_SHA3,
3034        .alg.ahash = {
3035                .init = safexcel_hmac_sha3_384_init,
3036                .update = safexcel_sha3_update,
3037                .final = safexcel_sha3_final,
3038                .finup = safexcel_sha3_finup,
3039                .digest = safexcel_hmac_sha3_384_digest,
3040                .setkey = safexcel_hmac_sha3_setkey,
3041                .export = safexcel_sha3_export,
3042                .import = safexcel_sha3_import,
3043                .halg = {
3044                        .digestsize = SHA3_384_DIGEST_SIZE,
3045                        .statesize = sizeof(struct safexcel_ahash_export_state),
3046                        .base = {
3047                                .cra_name = "hmac(sha3-384)",
3048                                .cra_driver_name = "safexcel-hmac-sha3-384",
3049                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
3050                                .cra_flags = CRYPTO_ALG_ASYNC |
3051                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
3052                                             CRYPTO_ALG_NEED_FALLBACK,
3053                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
3054                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3055                                .cra_init = safexcel_hmac_sha3_384_cra_init,
3056                                .cra_exit = safexcel_hmac_sha3_cra_exit,
3057                                .cra_module = THIS_MODULE,
3058                        },
3059                },
3060        },
3061};
3062
3063static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3064{
3065        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3066        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3067        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3068
3069        memset(req, 0, sizeof(*req));
3070
3071        /* Copy (half of) the key */
3072        memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
3073        /* Start of HMAC should have len == processed == blocksize */
3074        req->len        = SHA3_512_BLOCK_SIZE;
3075        req->processed  = SHA3_512_BLOCK_SIZE;
3076        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3077        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3078        req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3079        req->digest_sz = SHA3_512_DIGEST_SIZE;
3080        req->block_sz = SHA3_512_BLOCK_SIZE;
3081        req->hmac = true;
3082        ctx->do_fallback = false;
3083        ctx->fb_init_done = false;
3084        return 0;
3085}
3086
3087static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3088{
3089        if (req->nbytes)
3090                return safexcel_hmac_sha3_512_init(req) ?:
3091                       safexcel_ahash_finup(req);
3092
3093        /* HW cannot do zero length HMAC, use fallback instead */
3094        return safexcel_sha3_digest_fallback(req);
3095}
3096
3097static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3098{
3099        return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3100}

3101struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3102        .type = SAFEXCEL_ALG_TYPE_AHASH,
3103        .algo_mask = SAFEXCEL_ALG_SHA3,
3104        .alg.ahash = {
3105                .init = safexcel_hmac_sha3_512_init,
3106                .update = safexcel_sha3_update,
3107                .final = safexcel_sha3_final,
3108                .finup = safexcel_sha3_finup,
3109                .digest = safexcel_hmac_sha3_512_digest,
3110                .setkey = safexcel_hmac_sha3_setkey,
3111                .export = safexcel_sha3_export,
3112                .import = safexcel_sha3_import,
3113                .halg = {
3114                        .digestsize = SHA3_512_DIGEST_SIZE,
3115                        .statesize = sizeof(struct safexcel_ahash_export_state),
3116                        .base = {
3117                                .cra_name = "hmac(sha3-512)",
3118                                .cra_driver_name = "safexcel-hmac-sha3-512",
3119                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
3120                                .cra_flags = CRYPTO_ALG_ASYNC |
3121                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
3122                                             CRYPTO_ALG_NEED_FALLBACK,
3123                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
3124                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3125                                .cra_init = safexcel_hmac_sha3_512_cra_init,
3126                                .cra_exit = safexcel_hmac_sha3_cra_exit,
3127                                .cra_module = THIS_MODULE,
3128                        },
3129                },
3130        },
3131};
3132