linux/include/crypto/sha256_base.h
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * sha256_base.h - core logic for SHA-256 implementations
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#ifndef _CRYPTO_SHA256_BASE_H
#define _CRYPTO_SHA256_BASE_H

#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include <asm/unaligned.h>

/*
 * Prototype of the arch-provided compression function: it consumes @blocks
 * full SHA256_BLOCK_SIZE-byte blocks from @src and updates the intermediate
 * digest state in @sst.
 */
typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
                               int blocks);

static inline int sha224_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        return sha224_init(sctx);
}

static inline int sha256_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        return sha256_init(sctx);
}

static inline int sha256_base_do_update(struct shash_desc *desc,
                                        const u8 *data,
                                        unsigned int len,
                                        sha256_block_fn *block_fn)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
                int blocks;

                if (partial) {
                        /* Top up the buffered partial block and consume it. */
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;

                        block_fn(sctx, sctx->buf, 1);
                }

                /* Process all remaining full blocks straight from the input. */
                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

                if (blocks) {
                        block_fn(sctx, data, blocks);
                        data += blocks * SHA256_BLOCK_SIZE;
                }
                partial = 0;
        }
        /* Stash any trailing partial block for the next update or finalize. */
        if (len)
                memcpy(sctx->buf + partial, data, len);

        return 0;
}

static inline int sha256_base_do_finalize(struct shash_desc *desc,
                                          sha256_block_fn *block_fn)
{
        const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 *bits = (__be64 *)(sctx->buf + bit_offset);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        /* Append the 0x80 padding byte after the buffered data. */
        sctx->buf[partial++] = 0x80;
        if (partial > bit_offset) {
                /* No room left for the length field: pad out and flush this block. */
                memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
                partial = 0;

                block_fn(sctx, sctx->buf, 1);
        }

        /* Zero-pad up to the length field and append the message bit count. */
        memset(sctx->buf + partial, 0x0, bit_offset - partial);
        *bits = cpu_to_be64(sctx->count << 3);
        block_fn(sctx, sctx->buf, 1);

        return 0;
}

static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
{
        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *digest = (__be32 *)out;
        int i;

        /* Emit the state words big-endian; SHA-224 stops after 28 bytes. */
        for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32))
                put_unaligned_be32(sctx->state[i], digest++);

        /* Wipe the intermediate state before the descriptor is reused. */
        *sctx = (struct sha256_state){};
        return 0;
}

#endif /* _CRYPTO_SHA256_BASE_H */
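
Usage note: a minimal sketch of how an architecture-specific driver typically wires these helpers into a shash algorithm. The names my_sha256_transform, my_sha256_update, my_sha256_final and "sha256-my-arch" are hypothetical and only illustrate the pattern; real drivers plug in their own compression routine and register the alg with crypto_register_shash().

/* Illustrative glue only; not part of this header. */
static void my_sha256_transform(struct sha256_state *sst, u8 const *src,
                                int blocks)
{
        /* arch-specific compression of @blocks blocks would go here */
}

static int my_sha256_update(struct shash_desc *desc, const u8 *data,
                            unsigned int len)
{
        return sha256_base_do_update(desc, data, len, my_sha256_transform);
}

static int my_sha256_final(struct shash_desc *desc, u8 *out)
{
        sha256_base_do_finalize(desc, my_sha256_transform);
        return sha256_base_finish(desc, out);
}

static struct shash_alg my_sha256_alg = {
        .digestsize = SHA256_DIGEST_SIZE,
        .init       = sha256_base_init,
        .update     = my_sha256_update,
        .final      = my_sha256_final,
        .descsize   = sizeof(struct sha256_state),
        .base       = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-my-arch",
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        },
};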