linux/include/crypto/algapi.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE            160
#define MAX_ALGAPI_ALIGNMASK            63
#define MAX_CIPHER_BLOCKSIZE            16
#define MAX_CIPHER_ALIGNMASK            15

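/*
 * Illustrative sketch, not part of this header: generic code can use these
 * limits to reserve stack space that fits any registered algorithm, then
 * align the pointer at run time. "buf" and "aligned" are hypothetical
 * local names.
 *
 *      u8 buf[MAX_ALGAPI_BLOCKSIZE + MAX_ALGAPI_ALIGNMASK];
 *      u8 *aligned = PTR_ALIGN(buf, crypto_tfm_alg_alignmask(tfm) + 1);
 */
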
struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
        /* Compute the context size of a legacy crypto_tfm. */
        unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
        /* Compute the extra context size needed by the frontend transform. */
        unsigned int (*extsize)(struct crypto_alg *alg);
        /* Initialize a legacy crypto_tfm. */
        int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
        /* Initialize a frontend transform. */
        int (*init_tfm)(struct crypto_tfm *tfm);
        /* Show algorithm details in /proc/crypto. */
        void (*show)(struct seq_file *m, struct crypto_alg *alg);
        /* Report algorithm details via the crypto_user netlink interface. */
        int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
        /* Free an instance of this algorithm type. */
        void (*free)(struct crypto_instance *inst);

        /* Algorithm type value and lookup-mask adjustments. */
        unsigned int type;
        unsigned int maskclear;
        unsigned int maskset;
        /* Offset of the base crypto_tfm inside the frontend wrapper. */
        unsigned int tfmsize;
};

struct crypto_instance {
        struct crypto_alg alg;

        struct crypto_template *tmpl;

        union {
                /* Node in list of instances after registration. */
                struct hlist_node list;
                /* List of attached spawns before registration. */
                struct crypto_spawn *spawns;
        };

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
        struct list_head list;
        struct hlist_head instances;
        struct module *module;

        int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

        char name[CRYPTO_MAX_ALG_NAME];
};

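/*
 * Illustrative sketch of the template pattern, with hypothetical names
 * ("example_create", "example_tmpl") and error unwinding elided; the
 * instance's cra_* fields must also be filled in before registration.
 * The ->create() hook parses the attribute list, grabs the underlying
 * algorithm as a spawn, derives the instance name, and registers the
 * instance. The template itself is registered from module init.
 *
 *      static int example_create(struct crypto_template *tmpl,
 *                                struct rtattr **tb)
 *      {
 *              struct crypto_instance *inst;
 *              struct crypto_spawn *spawn;
 *              int err;
 *
 *              inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *              if (!inst)
 *                      return -ENOMEM;
 *              spawn = crypto_instance_ctx(inst);
 *
 *              err = crypto_grab_spawn(spawn, inst,
 *                                      crypto_attr_alg_name(tb[1]), 0, 0);
 *              if (!err)
 *                      err = crypto_inst_setname(inst, tmpl->name,
 *                                                spawn->alg);
 *              if (!err)
 *                      err = crypto_register_instance(tmpl, inst);
 *              return err;
 *      }
 *
 *      static struct crypto_template example_tmpl = {
 *              .name = "example",
 *              .create = example_create,
 *              .module = THIS_MODULE,
 *      };
 */
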
struct crypto_spawn {
        struct list_head list;
        struct crypto_alg *alg;
        union {
                /* Back pointer to instance after registration. */
                struct crypto_instance *inst;
                /* Spawn list pointer prior to registration. */
                struct crypto_spawn *next;
        };
        const struct crypto_type *frontend;
        u32 mask;
        bool dead;
        bool registered;
};

struct crypto_queue {
        struct list_head list;
        /* First backlogged request, or &list when there is no backlog. */
        struct list_head *backlog;

        unsigned int qlen;
        /* Beyond this length new requests are backlogged or rejected. */
        unsigned int max_qlen;
};

struct scatter_walk {
        struct scatterlist *sg;
        unsigned int offset;
};

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
                      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
                        struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
        return queue->qlen;
}

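/*
 * Illustrative sketch of the queue pattern used by async drivers; "dev",
 * "req" and the lock are hypothetical. crypto_enqueue_request() returns
 * -EINPROGRESS when queued normally, -EBUSY when the request was placed
 * on the backlog (only if it set CRYPTO_TFM_REQ_MAY_BACKLOG), and
 * -ENOSPC when the queue is full and the request may not backlog. The
 * driver notifies a backlogged request with -EINPROGRESS once it enters
 * the queue proper (see crypto_get_backlog() below).
 *
 *      spin_lock_bh(&dev->lock);
 *      err = crypto_enqueue_request(&dev->queue, &req->base);
 *      backlog = crypto_get_backlog(&dev->queue);
 *      async_req = crypto_dequeue_request(&dev->queue);
 *      spin_unlock_bh(&dev->lock);
 *
 *      if (backlog)
 *              backlog->complete(backlog, -EINPROGRESS);
 */
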
void crypto_inc(u8 *a, unsigned int size);
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int size);

static inline void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
            __builtin_constant_p(size) &&
            (size % sizeof(unsigned long)) == 0) {
                unsigned long *d = (unsigned long *)dst;
                unsigned long *s = (unsigned long *)src;

                while (size > 0) {
                        *d++ ^= *s++;
                        size -= sizeof(unsigned long);
                }
        } else {
                __crypto_xor(dst, dst, src, size);
        }
}

static inline void crypto_xor_cpy(u8 *dst, const u8 *src1, const u8 *src2,
                                  unsigned int size)
{
        if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) &&
            __builtin_constant_p(size) &&
            (size % sizeof(unsigned long)) == 0) {
                unsigned long *d = (unsigned long *)dst;
                unsigned long *s1 = (unsigned long *)src1;
                unsigned long *s2 = (unsigned long *)src2;

                while (size > 0) {
                        *d++ = *s1++ ^ *s2++;
                        size -= sizeof(unsigned long);
                }
        } else {
                __crypto_xor(dst, src1, src2, size);
        }
}

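/*
 * Illustrative use, with hypothetical buffer names: CBC-style chaining
 * XORs the previous ciphertext block into the next plaintext block.
 * crypto_xor() works in place, crypto_xor_cpy() writes elsewhere:
 *
 *      crypto_xor(block, prev, bsize);         // block ^= prev
 *      crypto_xor_cpy(dst, src, iv, bsize);    // dst = src ^ iv
 *
 * When "size" is a compile-time constant multiple of
 * sizeof(unsigned long) and unaligned accesses are cheap, the
 * word-at-a-time loop above is inlined; otherwise both helpers fall
 * back to __crypto_xor().
 */
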
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
        return PTR_ALIGN(crypto_tfm_ctx(tfm),
                         crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
        struct crypto_tfm *tfm)
{
        return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
        return inst->__ctx;
}

struct crypto_cipher_spawn {
        struct crypto_spawn base;
};

static inline int crypto_grab_cipher(struct crypto_cipher_spawn *spawn,
                                     struct crypto_instance *inst,
                                     const char *name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_CIPHER;
        mask |= CRYPTO_ALG_TYPE_MASK;
        return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}

static inline void crypto_drop_cipher(struct crypto_cipher_spawn *spawn)
{
        crypto_drop_spawn(&spawn->base);
}

static inline struct crypto_alg *crypto_spawn_cipher_alg(
        struct crypto_cipher_spawn *spawn)
{
        return spawn->base.alg;
}

static inline struct crypto_cipher *crypto_spawn_cipher(
        struct crypto_cipher_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_CIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_spawn_tfm(&spawn->base, type, mask));
}

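/*
 * Illustrative sketch with hypothetical names ("example_init_tfm",
 * "example_ctx"): an instance built on a single-block cipher usually
 * instantiates its child transform from the spawn kept in the instance
 * context when the outer tfm is initialized:
 *
 *      static int example_init_tfm(struct crypto_tfm *tfm)
 *      {
 *              struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 *              struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
 *              struct example_ctx *ctx = crypto_tfm_ctx(tfm);
 *              struct crypto_cipher *cipher;
 *
 *              cipher = crypto_spawn_cipher(spawn);
 *              if (IS_ERR(cipher))
 *                      return PTR_ERR(cipher);
 *
 *              ctx->child = cipher;
 *              return 0;
 *      }
 */
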
static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_async_request *crypto_get_backlog(
        struct crypto_queue *queue)
{
        return queue->backlog == &queue->list ? NULL :
               container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int crypto_requires_off(u32 type, u32 mask, u32 off)
{
        return (type ^ off) & mask & off;
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
        return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC);
}

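/*
 * Illustrative use (sketch; "algt" and "mask" are hypothetical locals):
 * a synchronous template folds this into its lookup mask so that it never
 * instantiates on top of an async implementation when the user requested
 * a sync algorithm:
 *
 *      struct crypto_attr_type *algt = crypto_get_attr_type(tb);
 *      mask |= crypto_requires_sync(algt->type, algt->mask);
 */
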
noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *                 timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
        return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}

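/*
 * Typical use (sketch; names are hypothetical): verifying an
 * authentication tag, where an early-exit memcmp() would leak how many
 * leading bytes matched:
 *
 *      if (crypto_memneq(calculated_tag, received_tag, authsize))
 *              return -EBADMSG;
 */
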
static inline void crypto_yield(u32 flags)
{
        if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
                cond_resched();
}

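/*
 * Illustrative use (sketch; "req" is hypothetical): software
 * implementations call this between chunks of a long operation so that
 * requests flagged CRYPTO_TFM_REQ_MAY_SLEEP can reschedule:
 *
 *      crypto_yield(req->base.flags);
 */
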
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
        CRYPTO_MSG_ALG_REQUEST,
        CRYPTO_MSG_ALG_REGISTER,
        CRYPTO_MSG_ALG_LOADED,
};

#endif  /* _CRYPTO_ALGAPI_H */