linux/include/crypto/engine.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Crypto engine API
 *
 * Copyright (c) 2016 Baolin Wang <baolin.wang@linaro.org>
 */
#ifndef _CRYPTO_ENGINE_H
#define _CRYPTO_ENGINE_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/akcipher.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

#define ENGINE_NAME_LEN 30
/*
 * struct crypto_engine - crypto hardware engine
 * @name: the engine name
 * @idling: the engine is entering idle state
 * @busy: request pump is busy
 * @running: the engine is running
 * @retry_support: indication that the hardware allows re-execution
 * of a failed backlog request; such a request is enqueued back into the
 * crypto-engine, in head position, to keep order
 * @list: link with the global crypto engine list
 * @queue_lock: spinlock to synchronise access to request queue
 * @queue: the crypto queue of the engine
 * @dev: the device attached to this engine
 * @rt: whether this queue is set to run as a realtime task
 * @prepare_crypt_hardware: a request will soon arrive from the queue
 * so the subsystem requests the driver to prepare the hardware
 * by issuing this call
 * @unprepare_crypt_hardware: there are currently no more requests on the
 * queue so the subsystem notifies the driver that it may relax the
 * hardware by issuing this call
 * @do_batch_requests: execute a batch of requests; only used when the
 * hardware supports handling multiple requests at once
 * @kworker: kthread worker struct for request pump
 * @pump_requests: work struct for scheduling work to the request pump
 * @priv_data: the engine private data
 * @cur_req: the request currently being processed
 */
struct crypto_engine {
        char                    name[ENGINE_NAME_LEN];
        bool                    idling;
        bool                    busy;
        bool                    running;

        bool                    retry_support;

        struct list_head        list;
        spinlock_t              queue_lock;
        struct crypto_queue     queue;
        struct device           *dev;

        bool                    rt;

        int (*prepare_crypt_hardware)(struct crypto_engine *engine);
        int (*unprepare_crypt_hardware)(struct crypto_engine *engine);
        int (*do_batch_requests)(struct crypto_engine *engine);

        struct kthread_worker           *kworker;
        struct kthread_work             pump_requests;

        void                            *priv_data;
        struct crypto_async_request     *cur_req;
};

/*
 * struct crypto_engine_op - crypto hardware engine operations
 * @prepare_request: do any preparation, if needed, before handling the
 * current request
 * @unprepare_request: undo any work done by prepare_request()
 * @do_one_request: process the current request
 */
struct crypto_engine_op {
        int (*prepare_request)(struct crypto_engine *engine,
                               void *areq);
        int (*unprepare_request)(struct crypto_engine *engine,
                                 void *areq);
        int (*do_one_request)(struct crypto_engine *engine,
                              void *areq);
};

struct crypto_engine_ctx {
        struct crypto_engine_op op;
};
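
/*
 * Usage sketch (illustrative only, not part of this header): a driver
 * typically embeds struct crypto_engine_ctx as the first member of its
 * transform context, so the engine can reach the ops through the tfm
 * context, and fills in at least do_one_request.  All my_drv_* names
 * below are hypothetical.
 *
 *      struct my_drv_ctx {
 *              struct crypto_engine_ctx enginectx;     // must stay first
 *              // driver-specific keys/state would follow here
 *      };
 *
 *      static int my_drv_do_one(struct crypto_engine *engine, void *areq)
 *      {
 *              struct skcipher_request *req =
 *                      container_of(areq, struct skcipher_request, base);
 *
 *              // program the hardware for req here; the result is
 *              // reported later via crypto_finalize_skcipher_request()
 *              return 0;
 *      }
 *
 *      static int my_drv_init_tfm(struct crypto_skcipher *tfm)
 *      {
 *              struct my_drv_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *              ctx->enginectx.op.do_one_request = my_drv_do_one;
 *              ctx->enginectx.op.prepare_request = NULL;
 *              ctx->enginectx.op.unprepare_request = NULL;
 *              return 0;
 *      }
 */
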
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
                                           struct aead_request *req);
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
                                               struct akcipher_request *req);
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
                                           struct ahash_request *req);
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
                                               struct skcipher_request *req);
void crypto_finalize_aead_request(struct crypto_engine *engine,
                                  struct aead_request *req, int err);
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
                                      struct akcipher_request *req, int err);
void crypto_finalize_hash_request(struct crypto_engine *engine,
                                  struct ahash_request *req, int err);
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
                                      struct skcipher_request *req, int err);
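
/*
 * Usage sketch (illustrative only): the driver's .encrypt/.decrypt entry
 * points do not drive the hardware directly; they hand the request to the
 * engine, and the completion path (here an interrupt handler) reports the
 * result through the matching crypto_finalize_*_request() helper so the
 * engine can pump the next request.  The my_drv_* names and the way the
 * device/engine/request pointers are tracked are assumptions made for
 * this example.
 *
 *      static int my_drv_encrypt(struct skcipher_request *req)
 *      {
 *              struct my_drv_dev *dd = my_drv_find_dev(req);   // hypothetical lookup
 *
 *              // typically returns -EINPROGRESS (or -EBUSY when backlogged)
 *              return crypto_transfer_skcipher_request_to_engine(dd->engine, req);
 *      }
 *
 *      static irqreturn_t my_drv_irq(int irq, void *data)
 *      {
 *              struct my_drv_dev *dd = data;
 *
 *              // dd->cur_req was remembered in do_one_request(); a
 *              // negative err is propagated to the request's completion
 *              crypto_finalize_skcipher_request(dd->engine, dd->cur_req, 0);
 *              return IRQ_HANDLED;
 *      }
 */
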
int crypto_engine_start(struct crypto_engine *engine);
int crypto_engine_stop(struct crypto_engine *engine);
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt);
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
                                                       bool retry_support,
                                                       int (*cbk_do_batch)(struct crypto_engine *engine),
                                                       bool rt, int qlen);
int crypto_engine_exit(struct crypto_engine *engine);
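
/*
 * Lifecycle sketch (illustrative only): a device driver usually allocates
 * and starts one engine per device in probe and tears it down in remove;
 * hardware that can retry failed backlog requests or batch submissions
 * would use crypto_engine_alloc_init_and_set() instead.  my_drv_* names
 * are hypothetical and error unwinding is trimmed.
 *
 *      static int my_drv_probe(struct platform_device *pdev)
 *      {
 *              struct my_drv_dev *dd;
 *
 *              dd = devm_kzalloc(&pdev->dev, sizeof(*dd), GFP_KERNEL);
 *              if (!dd)
 *                      return -ENOMEM;
 *              platform_set_drvdata(pdev, dd);
 *
 *              dd->engine = crypto_engine_alloc_init(&pdev->dev, true);
 *              if (!dd->engine)
 *                      return -ENOMEM;
 *
 *              return crypto_engine_start(dd->engine);
 *      }
 *
 *      static int my_drv_remove(struct platform_device *pdev)
 *      {
 *              struct my_drv_dev *dd = platform_get_drvdata(pdev);
 *
 *              crypto_engine_exit(dd->engine); // stops the pump, destroys its kworker
 *              return 0;
 *      }
 */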

#endif /* _CRYPTO_ENGINE_H */