linux/arch/arm64/crypto/sha2-ce-glue.c
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

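/*
 * The generic sha256_state is extended with a 'finalize' flag. The glue code
 * sets it when the assembly transform is allowed to perform the finalization
 * itself (see sha256_ce_finup() below) and clears it otherwise.
 */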
struct sha256_ce_state {
        struct sha256_state     sst;
        u32                     finalize;
};

asmlinkage void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
                                  int blocks);

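/*
 * These offsets are exported for the accompanying assembly (presumably
 * sha2-ce-core.S) so it can locate the 'count' and 'finalize' fields of the
 * state without depending on the C struct layout directly.
 */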
const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
                                              sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
                                                 finalize);

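/* Non-NEON fallback transform, used whenever kernel-mode NEON is unusable. */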
asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);

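/*
 * Hash 'len' bytes of input. The Crypto Extensions transform is used when
 * kernel-mode NEON is available; otherwise fall back to the non-NEON routine.
 * 'finalize' is cleared because this path never pads the final block.
 */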
static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
                            unsigned int len)
{
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);

        if (!may_use_simd())
                return sha256_base_do_update(desc, data, len,
                                (sha256_block_fn *)sha256_block_data_order);

        sctx->finalize = 0;
        kernel_neon_begin();
        sha256_base_do_update(desc, data, len,
                              (sha256_block_fn *)sha2_ce_transform);
        kernel_neon_end();

        return 0;
}

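/*
 * Hash any remaining input and write out the digest. If nothing has been
 * hashed yet and 'len' is a non-zero multiple of the block size, the assembly
 * transform may pad and finalize in a single pass (see 'finalize' below).
 */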
static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
                           unsigned int len, u8 *out)
{
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);
        bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

        if (!may_use_simd()) {
                if (len)
                        sha256_base_do_update(desc, data, len,
                                (sha256_block_fn *)sha256_block_data_order);
                sha256_base_do_finalize(desc,
                                (sha256_block_fn *)sha256_block_data_order);
                return sha256_base_finish(desc, out);
        }

        /*
         * Allow the asm code to perform the finalization if there is no
         * partial data and the input is a round multiple of the block size.
         */
        sctx->finalize = finalize;

        kernel_neon_begin();
        sha256_base_do_update(desc, data, len,
                              (sha256_block_fn *)sha2_ce_transform);
        if (!finalize)
                sha256_base_do_finalize(desc,
                                        (sha256_block_fn *)sha2_ce_transform);
        kernel_neon_end();
        return sha256_base_finish(desc, out);
}

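/*
 * Pad and write out the digest. The generic base layer performs the padding
 * here, so 'finalize' is cleared to keep the assembly transform from
 * finalizing a second time.
 */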
static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_ce_state *sctx = shash_desc_ctx(desc);

        if (!may_use_simd()) {
                sha256_base_do_finalize(desc,
                                (sha256_block_fn *)sha256_block_data_order);
                return sha256_base_finish(desc, out);
        }

        sctx->finalize = 0;
        kernel_neon_begin();
        sha256_base_do_finalize(desc, (sha256_block_fn *)sha2_ce_transform);
        kernel_neon_end();
        return sha256_base_finish(desc, out);
}

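/*
 * SHA-224 and SHA-256 differ only in their initial state and digest size, so
 * both entries share the same update/final/finup handlers. cra_priority 200
 * ranks them above the generic C implementations.
 */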
static struct shash_alg algs[] = { {
        .init                   = sha224_base_init,
        .update                 = sha256_ce_update,
        .final                  = sha256_ce_final,
        .finup                  = sha256_ce_finup,
        .descsize               = sizeof(struct sha256_ce_state),
        .digestsize             = SHA224_DIGEST_SIZE,
        .base                   = {
                .cra_name               = "sha224",
                .cra_driver_name        = "sha224-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
}, {
        .init                   = sha256_base_init,
        .update                 = sha256_ce_update,
        .final                  = sha256_ce_final,
        .finup                  = sha256_ce_finup,
        .descsize               = sizeof(struct sha256_ce_state),
        .digestsize             = SHA256_DIGEST_SIZE,
        .base                   = {
                .cra_name               = "sha256",
                .cra_driver_name        = "sha256-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
} };

static int __init sha2_ce_mod_init(void)
{
        return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

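/*
 * Register the algorithms only on CPUs that advertise the SHA-2 Crypto
 * Extensions; module_cpu_feature_match() also allows the module to be
 * auto-loaded on such systems.
 */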
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);