linux/include/crypto/internal/aead.h
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

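/*
 * The union below overlays the generic struct crypto_instance on top of
 * the struct aead_alg it carries: "head" pads s.base out to the offset of
 * alg.base, so the crypto_alg at the start of s.base and aead_alg's base
 * member share the same storage.  aead_crypto_instance() and
 * aead_instance() below simply convert between the two views.
 */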
struct aead_instance {
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};

struct crypto_aead_spawn {
	struct crypto_spawn base;
};

struct aead_queue {
	struct crypto_queue base;
};

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *aead_crypto_instance(
	struct aead_instance *inst)
{
	return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
	return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
	return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

static inline void crypto_set_aead_spawn(
	struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
		     u32 type, u32 mask);

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct aead_alg *crypto_spawn_aead_alg(
	struct crypto_aead_spawn *spawn)
{
	return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
	struct crypto_aead_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}

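/*
 * Hypothetical sketch (not part of this header): a template instance's
 * ->init callback typically grabs the inner AEAD from the spawn stored in
 * the instance context, keeps it in the tfm context obtained via
 * crypto_aead_ctx(), and sizes the request context so aead_request_ctx()
 * has room for a sub-request.  All "example_*" names are made up;
 * crypto_aead_reqsize(), crypto_free_aead() and IS_ERR()/PTR_ERR() come
 * from <crypto/aead.h> and <linux/err.h>.
 */
struct example_tfm_ctx {
	struct crypto_aead *child;
};

struct example_req_ctx {
	struct aead_request subreq;	/* must stay last: child req ctx follows */
};

static int example_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct example_tfm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child;

	child = crypto_spawn_aead(spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;
	crypto_aead_set_reqsize(tfm, sizeof(struct example_req_ctx) +
				     crypto_aead_reqsize(child));
	return 0;
}

static void example_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct example_tfm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}
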
static inline unsigned int crypto_aead_alg_maxauthsize(struct aead_alg *alg)
{
	return alg->maxauthsize;
}

static inline unsigned int crypto_aead_maxauthsize(struct crypto_aead *aead)
{
	return crypto_aead_alg_maxauthsize(crypto_aead_alg(aead));
}

static inline void aead_init_queue(struct aead_queue *queue,
				   unsigned int max_qlen)
{
	crypto_init_queue(&queue->base, max_qlen);
}

static inline int aead_enqueue_request(struct aead_queue *queue,
				       struct aead_request *request)
{
	return crypto_enqueue_request(&queue->base, &request->base);
}

static inline struct aead_request *aead_dequeue_request(
	struct aead_queue *queue)
{
	struct crypto_async_request *req;

	req = crypto_dequeue_request(&queue->base);

	return req ? container_of(req, struct aead_request, base) : NULL;
}

static inline struct aead_request *aead_get_backlog(struct aead_queue *queue)
{
	struct crypto_async_request *req;

	req = crypto_get_backlog(&queue->base);

	return req ? container_of(req, struct aead_request, base) : NULL;
}

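/*
 * Hypothetical sketch (not part of this header): a driver built on the
 * queue helpers above usually enqueues the new request, promotes at most
 * one request off the backlog by completing it with -EINPROGRESS, and
 * returns the next request to be programmed into the hardware.
 * "example_dev" and the function name are illustrative only; spinlock_t
 * and spin_lock_bh() come from <linux/spinlock.h>.
 */
struct example_dev {
	spinlock_t lock;
	struct aead_queue queue;
};

static struct aead_request *example_aead_enqueue(struct example_dev *dev,
						 struct aead_request *req,
						 int *err)
{
	struct aead_request *backlog, *next;

	spin_lock_bh(&dev->lock);
	*err = aead_enqueue_request(&dev->queue, req);
	backlog = aead_get_backlog(&dev->queue);
	next = aead_dequeue_request(&dev->queue);
	spin_unlock_bh(&dev->lock);

	/* The request just taken off the backlog is now properly queued. */
	if (backlog)
		aead_request_complete(backlog, -EINPROGRESS);

	/*
	 * The caller programs "next" (if any) into the hardware and later
	 * reports the result with aead_request_complete().
	 */
	return next;
}
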
static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}

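/*
 * Hypothetical sketch: a user honouring the rule described above could
 * trim a partial update so that it covers a whole number of chunks.  The
 * helper name is made up for illustration; real code would also have to
 * handle the final, shorter piece separately.
 */
static inline unsigned int example_chunk_aligned(struct crypto_aead *tfm,
						 unsigned int nbytes)
{
	unsigned int chunksize = crypto_aead_chunksize(tfm);

	return chunksize ? nbytes - (nbytes % chunksize) : nbytes;
}
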
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);

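/*
 * Hypothetical sketch (not part of this header): the usual shape of a
 * template ->create() callback built from the helpers above.  It grabs the
 * inner AEAD by name, inherits the parameters this (do-nothing) wrapper
 * does not change, and registers the instance.  The "example_*" names and
 * the init/exit callbacks from the earlier sketch are illustrative only; a
 * real template would also fill in cra_name/cra_driver_name, cra_priority,
 * cra_blocksize/cra_alignmask and its own setkey/setauthsize/encrypt/
 * decrypt handlers.  kzalloc()/kfree() come from <linux/slab.h> and
 * crypto_attr_alg_name() from <crypto/algapi.h>.
 */
static void example_aead_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int example_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_aead_spawn *spawn;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	int err;

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));

	err = crypto_grab_aead(spawn, name, 0, 0);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.init = example_aead_init_tfm;
	inst->alg.exit = example_aead_exit_tfm;
	inst->alg.base.cra_ctxsize = sizeof(struct example_tfm_ctx);

	inst->free = example_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_aead;

	return 0;

err_drop_aead:
	crypto_drop_aead(spawn);
err_free_inst:
	kfree(inst);
	return err;
}
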
#endif  /* _CRYPTO_INTERNAL_AEAD_H */