/* linux/include/crypto/cryptd.h */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

/*
 * Handle for an ablkcipher tfm running behind the cryptd daemon.
 * Allocated by cryptd_alloc_ablkcipher() and released with
 * cryptd_free_ablkcipher().
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;	/* must stay first: __cryptd_ablkcipher_cast() is a plain cast */
};
  24
  25static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
  26        struct crypto_ablkcipher *tfm)
  27{
  28        return (struct cryptd_ablkcipher *)tfm;
  29}
  30
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/* Child transform backing @tfm (presumably the underlying non-cryptd
 * algorithm — confirm against crypto/cryptd.c). */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
  37
/*
 * Handle for an skcipher tfm running behind the cryptd daemon.
 * Allocated by cryptd_alloc_skcipher() and released with
 * cryptd_free_skcipher().
 */
struct cryptd_skcipher {
	struct crypto_skcipher base;	/* must stay first so the handle can double as the base tfm */
};
  41
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
/* Child transform backing @tfm (presumably the underlying non-cryptd
 * algorithm — confirm against crypto/cryptd.c). */
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
  48
/*
 * Handle for an ahash tfm running behind the cryptd daemon.
 * Allocated by cryptd_alloc_ahash() and released with
 * cryptd_free_ahash().
 */
struct cryptd_ahash {
	struct crypto_ahash base;	/* must stay first: __cryptd_ahash_cast() is a plain cast */
};
  52
  53static inline struct cryptd_ahash *__cryptd_ahash_cast(
  54        struct crypto_ahash *tfm)
  55{
  56        return (struct cryptd_ahash *)tfm;
  57}
  58
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/* Child transform backing @tfm (presumably the underlying non-cryptd
 * algorithm — confirm against crypto/cryptd.c). */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Per-request shash descriptor for @req — see crypto/cryptd.c for layout. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
void cryptd_free_ahash(struct cryptd_ahash *tfm);
  67
/*
 * Handle for an AEAD tfm running behind the cryptd daemon.
 * Allocated by cryptd_alloc_aead() and released with
 * cryptd_free_aead().
 */
struct cryptd_aead {
	struct crypto_aead base;	/* must stay first: __cryptd_aead_cast() is a plain cast */
};
  71
  72static inline struct cryptd_aead *__cryptd_aead_cast(
  73        struct crypto_aead *tfm)
  74{
  75        return (struct cryptd_aead *)tfm;
  76}
  77
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

/* Child transform backing @tfm (presumably the underlying non-cryptd
 * algorithm — confirm against crypto/cryptd.c). */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

void cryptd_free_aead(struct cryptd_aead *tfm);

#endif /* _CRYPTO_CRYPT_H */