linux/include/crypto/internal/skcipher.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;
/*
 * An skcipher template instance.  The union overlays the generic
 * crypto_instance on the full skcipher_alg: @s.head spans the members of
 * skcipher_alg that precede its base, so that @s.base sits exactly over
 * @alg.base.
 */
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};
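
/*
 * Example (an illustrative sketch, not part of this header): a template
 * typically allocates its instance with room for a spawn in the context
 * area, then fills in @alg before registering.  The use of a
 * crypto_skcipher_spawn here is just the common case.
 *
 *	struct skcipher_instance *inst;
 *	struct crypto_skcipher_spawn *spawn;
 *
 *	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *	if (!inst)
 *		return -ENOMEM;
 *	spawn = skcipher_instance_ctx(inst);
 */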

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;	/* bytes available in the current step */

	struct scatter_walk out;
	unsigned int total;	/* bytes remaining overall */

	struct list_head buffers;

	u8 *page;		/* scratch page for the slow path */
	u8 *buffer;		/* bounce buffer */
	u8 *oiv;		/* original IV location */
	void *iv;		/* IV for the current step */

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;	/* step granularity (chunksize/walksize) */
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}
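
/*
 * Example (an illustrative sketch): a template's ->create() callback
 * grabs the underlying algorithm by name and drops the reference on the
 * error path.  err_free_inst is a hypothetical label.
 *
 *	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
 *				   crypto_attr_alg_name(tb[1]), 0, mask);
 *	if (err)
 *		goto err_free_inst;
 */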

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}
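
/*
 * Example (an illustrative sketch): a template's ->init() callback
 * usually instantiates the child transform from its spawn and sizes the
 * request context to hold its own state plus a child request.
 * my_tfm_ctx and my_request_ctx are hypothetical types.
 *
 *	static int my_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *		struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child = crypto_spawn_skcipher(spawn);
 *
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm, sizeof(struct my_request_ctx) +
 *						 crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */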

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
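
/*
 * Example (an illustrative sketch): a driver exposing several modes
 * registers them as an array from its module init and unregisters the
 * same array on exit.  my_algs is a hypothetical array.
 *
 *	static struct skcipher_alg my_algs[] = { ... };
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_skciphers(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_skciphers(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */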

int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}
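
/*
 * Example (an illustrative sketch): the canonical walk loop in a
 * software implementation.  skcipher_walk_virt() maps the next
 * contiguous span; walk.nbytes is what the step offers, and
 * skcipher_walk_done() is told how many bytes were left unprocessed.
 * my_crypt_blocks() is a hypothetical per-span helper.
 *
 *	struct skcipher_walk walk;
 *	int err;
 *
 *	err = skcipher_walk_virt(&walk, req, false);
 *	while (walk.nbytes) {
 *		unsigned int n = walk.nbytes;
 *
 *		if (n < walk.total)
 *			n = round_down(n, walk.stride);
 *
 *		my_crypt_blocks(ctx, walk.dst.virt.addr, walk.src.virt.addr,
 *				n, walk.iv);
 *		err = skcipher_walk_done(&walk, walk.nbytes - n);
 *	}
 *	return err;
 */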

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}
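
/*
 * Example (an illustrative sketch): the request flags decide whether a
 * walk may sleep, e.g. when starting a virtual address walk:
 *
 *	bool atomic = !(skcipher_request_flags(req) &
 *			CRYPTO_TFM_REQ_MAY_SLEEP);
 *
 *	err = skcipher_walk_virt(&walk, req, atomic);
 */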

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal to it if the concern does
 * not apply).
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}
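
/*
 * Example (an illustrative sketch): a simple single-block mode template
 * can lean on skcipher_alloc_instance_simple() for the boilerplate and
 * supply only its encrypt/decrypt handlers, in the style of crypto/ecb.c.
 * The crypto_mymode_* names are hypothetical.
 *
 *	static int crypto_mymode_create(struct crypto_template *tmpl,
 *					struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = crypto_mymode_encrypt;
 *		inst->alg.decrypt = crypto_mymode_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */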

#endif  /* _CRYPTO_INTERNAL_SKCIPHER_H */