linux/include/crypto/sha256_base.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * sha256_base.h - core logic for SHA-256 implementations
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#ifndef _CRYPTO_SHA256_BASE_H
#define _CRYPTO_SHA256_BASE_H

#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/string.h>

#include <asm/unaligned.h>

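/*
 * A block function consumes 'blocks' full SHA256_BLOCK_SIZE-byte blocks from
 * 'src' and folds them into the intermediate state in 'sst'. Architecture
 * specific SHA-256 drivers supply their own function of this type and pass
 * it to the helpers below.
 */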
typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
                               int blocks);

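/*
 * Initialise the descriptor context with the standard SHA-224 or SHA-256
 * initial hash values. Both variants use the same struct sha256_state and
 * share the helpers below; they differ only in their initial state and in
 * the digest size reported by the transform.
 */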
static inline int sha224_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        sha224_init(sctx);
        return 0;
}

static inline int sha256_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        sha256_init(sctx);
        return 0;
}

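/*
 * Buffer and hash message data. Bytes are accumulated in sctx->buf until a
 * full SHA256_BLOCK_SIZE block is available; complete blocks (including any
 * block completed from previously buffered data) are handed to block_fn, and
 * the remainder is kept in the buffer for the next update or for the final
 * padding step. sctx->count tracks the total number of bytes processed.
 */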
static inline int sha256_base_do_update(struct shash_desc *desc,
                                        const u8 *data,
                                        unsigned int len,
                                        sha256_block_fn *block_fn)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
                int blocks;

                if (partial) {
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;

                        block_fn(sctx, sctx->buf, 1);
                }

                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

                if (blocks) {
                        block_fn(sctx, data, blocks);
                        data += blocks * SHA256_BLOCK_SIZE;
                }
                partial = 0;
        }
        if (len)
                memcpy(sctx->buf + partial, data, len);

        return 0;
}

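/*
 * Apply the standard SHA-256 padding: a single 0x80 byte, zero padding, and
 * the message length in bits as a 64-bit big-endian value in the last eight
 * bytes of the final block. If fewer than eight bytes remain after the 0x80
 * byte, the rest of the block is zeroed and hashed first, and the length is
 * placed in a fresh all-zero block.
 */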
static inline int sha256_base_do_finalize(struct shash_desc *desc,
                                          sha256_block_fn *block_fn)
{
        const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 *bits = (__be64 *)(sctx->buf + bit_offset);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->buf[partial++] = 0x80;
        if (partial > bit_offset) {
                memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
                partial = 0;

                block_fn(sctx, sctx->buf, 1);
        }

        memset(sctx->buf + partial, 0x0, bit_offset - partial);
        *bits = cpu_to_be64(sctx->count << 3);
        block_fn(sctx, sctx->buf, 1);

        return 0;
}

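/*
 * Copy the digest out of the internal state. Each 32-bit state word is
 * written big endian; crypto_shash_digestsize() determines how many words
 * are emitted (seven for SHA-224, eight for SHA-256). The descriptor context
 * is then wiped with memzero_explicit() so no intermediate hash state is
 * left behind.
 */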
static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
{
        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *digest = (__be32 *)out;
        int i;

        for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32))
                put_unaligned_be32(sctx->state[i], digest++);

        memzero_explicit(sctx, sizeof(*sctx));
        return 0;
}

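/*
 * Illustrative sketch (not part of this header): a driver built on these
 * helpers typically wires them up along the following lines, where
 * my_sha256_block() stands for a hypothetical sha256_block_fn provided by
 * the architecture code.
 *
 *	static int my_sha256_update(struct shash_desc *desc, const u8 *data,
 *				    unsigned int len)
 *	{
 *		return sha256_base_do_update(desc, data, len, my_sha256_block);
 *	}
 *
 *	static int my_sha256_finup(struct shash_desc *desc, const u8 *data,
 *				   unsigned int len, u8 *out)
 *	{
 *		sha256_base_do_update(desc, data, len, my_sha256_block);
 *		sha256_base_do_finalize(desc, my_sha256_block);
 *		return sha256_base_finish(desc, out);
 *	}
 */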
#endif /* _CRYPTO_SHA256_BASE_H */