/* linux/include/crypto/internal/aead.h */
   1/* SPDX-License-Identifier: GPL-2.0-or-later */
   2/*
   3 * AEAD: Authenticated Encryption with Associated Data
   4 * 
   5 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
   6 */
   7
   8#ifndef _CRYPTO_INTERNAL_AEAD_H
   9#define _CRYPTO_INTERNAL_AEAD_H
  10
  11#include <crypto/aead.h>
  12#include <crypto/algapi.h>
  13#include <linux/stddef.h>
  14#include <linux/types.h>
  15
  16struct rtattr;
  17
/*
 * Template-generated AEAD algorithm instance.
 *
 * The anonymous union overlays struct aead_alg with a struct that places a
 * crypto_instance at the exact offset of aead_alg's own ->base member (the
 * head[] padding is sized with offsetof for precisely this purpose).  This
 * lets the same memory be viewed either as the algorithm (->alg) or as the
 * generic instance (->s.base) without casting.
 */
struct aead_instance {
	/* Destructor invoked when the instance is torn down. */
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			/* Pad so that s.base lines up with alg.base below. */
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};
  28
/*
 * Typed wrapper around struct crypto_spawn used by templates that
 * instantiate an underlying AEAD algorithm.
 */
struct crypto_aead_spawn {
	struct crypto_spawn base;
};
  32
/*
 * Typed wrapper around struct crypto_queue for queueing AEAD requests.
 */
struct aead_queue {
	struct crypto_queue base;
};
  36
  37static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
  38{
  39        return crypto_tfm_ctx(&tfm->base);
  40}
  41
  42static inline struct crypto_instance *aead_crypto_instance(
  43        struct aead_instance *inst)
  44{
  45        return container_of(&inst->alg.base, struct crypto_instance, alg);
  46}
  47
  48static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
  49{
  50        return container_of(&inst->alg, struct aead_instance, alg.base);
  51}
  52
  53static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
  54{
  55        return aead_instance(crypto_tfm_alg_instance(&aead->base));
  56}
  57
  58static inline void *aead_instance_ctx(struct aead_instance *inst)
  59{
  60        return crypto_instance_ctx(aead_crypto_instance(inst));
  61}
  62
  63static inline void *aead_request_ctx(struct aead_request *req)
  64{
  65        return req->__ctx;
  66}
  67
  68static inline void aead_request_complete(struct aead_request *req, int err)
  69{
  70        req->base.complete(&req->base, err);
  71}
  72
  73static inline u32 aead_request_flags(struct aead_request *req)
  74{
  75        return req->base.flags;
  76}
  77
  78static inline struct aead_request *aead_request_cast(
  79        struct crypto_async_request *req)
  80{
  81        return container_of(req, struct aead_request, base);
  82}
  83
  84static inline void crypto_set_aead_spawn(
  85        struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
  86{
  87        crypto_set_spawn(&spawn->base, inst);
  88}
  89
/*
 * Look up the AEAD algorithm @name (filtered by @type/@mask) and bind it to
 * @spawn.  Implemented elsewhere; presumably returns 0 on success or a
 * negative errno — confirm against the definition in crypto/aead.c.
 */
int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask);
  92
  93static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
  94{
  95        crypto_drop_spawn(&spawn->base);
  96}
  97
  98static inline struct aead_alg *crypto_spawn_aead_alg(
  99        struct crypto_aead_spawn *spawn)
 100{
 101        return container_of(spawn->base.alg, struct aead_alg, base);
 102}
 103
 104static inline struct crypto_aead *crypto_spawn_aead(
 105        struct crypto_aead_spawn *spawn)
 106{
 107        return crypto_spawn_tfm2(&spawn->base);
 108}
 109
 110static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
 111                                           unsigned int reqsize)
 112{
 113        aead->reqsize = reqsize;
 114}
 115
 116static inline unsigned int crypto_aead_alg_maxauthsize(struct aead_alg *alg)
 117{
 118        return alg->maxauthsize;
 119}
 120
 121static inline unsigned int crypto_aead_maxauthsize(struct crypto_aead *aead)
 122{
 123        return crypto_aead_alg_maxauthsize(crypto_aead_alg(aead));
 124}
 125
 126static inline void aead_init_queue(struct aead_queue *queue,
 127                                   unsigned int max_qlen)
 128{
 129        crypto_init_queue(&queue->base, max_qlen);
 130}
 131
 132static inline int aead_enqueue_request(struct aead_queue *queue,
 133                                       struct aead_request *request)
 134{
 135        return crypto_enqueue_request(&queue->base, &request->base);
 136}
 137
 138static inline struct aead_request *aead_dequeue_request(
 139        struct aead_queue *queue)
 140{
 141        struct crypto_async_request *req;
 142
 143        req = crypto_dequeue_request(&queue->base);
 144
 145        return req ? container_of(req, struct aead_request, base) : NULL;
 146}
 147
 148static inline struct aead_request *aead_get_backlog(struct aead_queue *queue)
 149{
 150        struct crypto_async_request *req;
 151
 152        req = crypto_get_backlog(&queue->base);
 153
 154        return req ? container_of(req, struct aead_request, base) : NULL;
 155}
 156
 157static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
 158{
 159        return alg->chunksize;
 160}
 161
 162/**
 163 * crypto_aead_chunksize() - obtain chunk size
 164 * @tfm: cipher handle
 165 *
 166 * The block size is set to one for ciphers such as CCM.  However,
 167 * you still need to provide incremental updates in multiples of
 168 * the underlying block size as the IV does not have sub-block
 169 * granularity.  This is known in this API as the chunk size.
 170 *
 171 * Return: chunk size in bytes
 172 */
 173static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
 174{
 175        return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
 176}
 177
/*
 * Registration interface, implemented in crypto/aead.c.  The *_aeads
 * variants operate on an array of @count algorithms;
 * aead_register_instance() registers a template-created instance under
 * @tmpl.  Presumably each int-returning function yields 0 on success or a
 * negative errno — confirm against the definitions.
 */
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);
 184
 185#endif  /* _CRYPTO_INTERNAL_AEAD_H */
 186
 187