linux/include/crypto/algapi.h
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
        unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
        unsigned int (*extsize)(struct crypto_alg *alg);
        int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
        int (*init_tfm)(struct crypto_tfm *tfm);
        void (*show)(struct seq_file *m, struct crypto_alg *alg);
        int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

        unsigned int type;
        unsigned int maskclear;
        unsigned int maskset;
        unsigned int tfmsize;
};

struct crypto_instance {
        struct crypto_alg alg;

        struct crypto_template *tmpl;
        struct hlist_node list;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
        struct list_head list;
        struct hlist_head instances;
        struct module *module;

        struct crypto_instance *(*alloc)(struct rtattr **tb);
        void (*free)(struct crypto_instance *inst);
        int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
        struct list_head list;
        struct crypto_alg *alg;
        struct crypto_instance *inst;
        const struct crypto_type *frontend;
        u32 mask;
};

struct crypto_queue {
        struct list_head list;
        struct list_head *backlog;

        unsigned int qlen;
        unsigned int max_qlen;
};

struct scatter_walk {
        struct scatterlist *sg;
        unsigned int offset;
};

struct blkcipher_walk {
        union {
                struct {
                        struct page *page;
                        unsigned long offset;
                } phys;

                struct {
                        u8 *page;
                        u8 *addr;
                } virt;
        } src, dst;

        struct scatter_walk in;
        unsigned int nbytes;

        struct scatter_walk out;
        unsigned int total;

        void *page;
        u8 *buffer;
        u8 *iv;
        unsigned int ivsize;

        int flags;
        unsigned int walk_blocksize;
        unsigned int cipher_blocksize;
        unsigned int alignmask;
};

struct ablkcipher_walk {
        struct {
                struct page *page;
                unsigned int offset;
        } src, dst;

        struct scatter_walk     in;
        unsigned int            nbytes;
        struct scatter_walk     out;
        unsigned int            total;
        struct list_head        buffers;
        u8                      *iv_buffer;
        u8                      *iv;
        int                     flags;
        unsigned int            blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);
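
/*
 * Illustrative sketch, not part of this header: a template module normally
 * fills in a struct crypto_template and registers it from module_init().
 * The "sample" names below are hypothetical and used only for illustration.
 *
 *      static struct crypto_template crypto_sample_tmpl = {
 *              .name = "sample",
 *              .alloc = crypto_sample_alloc,
 *              .free = crypto_sample_free,
 *              .module = THIS_MODULE,
 *      };
 *
 *      static int __init crypto_sample_module_init(void)
 *      {
 *              return crypto_register_template(&crypto_sample_tmpl);
 *      }
 *
 *      static void __exit crypto_sample_module_exit(void)
 *      {
 *              crypto_unregister_template(&crypto_sample_tmpl);
 *      }
 */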

int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_alg *alg);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
                      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
                       struct crypto_instance *inst,
                       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
                                    struct crypto_instance *inst)
{
        spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
                                    const struct crypto_type *frontend,
                                    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
                                                 u32 type, u32 mask)
{
        return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
                             unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
                                              struct crypto_alg *alg);
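
/*
 * Illustrative sketch, not part of this header: a simple template's ->alloc()
 * hook typically checks the attribute type, looks up the underlying algorithm
 * and wraps it in a freshly allocated instance.  The "sample" names are
 * hypothetical; crypto_get_attr_alg() is the helper defined further below.
 *
 *      static struct crypto_instance *crypto_sample_alloc(struct rtattr **tb)
 *      {
 *              struct crypto_instance *inst;
 *              struct crypto_alg *alg;
 *              int err;
 *
 *              err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 *              if (err)
 *                      return ERR_PTR(err);
 *
 *              alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *                                        CRYPTO_ALG_TYPE_MASK);
 *              if (IS_ERR(alg))
 *                      return ERR_CAST(alg);
 *
 *              inst = crypto_alloc_instance("sample", alg);
 *              (on success, fill in inst->alg.cra_* fields here)
 *
 *              crypto_mod_put(alg);
 *              return inst;
 *      }
 */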

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
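
/*
 * Illustrative sketch, not part of this header: async drivers usually feed
 * requests into a crypto_queue from their request path and drain it from a
 * worker, completing any backlogged request with -EINPROGRESS first (see
 * crypto_get_backlog() further below).  The lock and sample_process() are
 * hypothetical.
 *
 *      In the request path:
 *
 *              spin_lock_bh(&lock);
 *              err = crypto_enqueue_request(&queue, &req->base);
 *              spin_unlock_bh(&lock);
 *              return err;     (-EINPROGRESS, or -EBUSY if the queue is full)
 *
 *      In the worker:
 *
 *              spin_lock_bh(&lock);
 *              backlog = crypto_get_backlog(&queue);
 *              async_req = crypto_dequeue_request(&queue);
 *              spin_unlock_bh(&lock);
 *
 *              if (!async_req)
 *                      return;
 *              if (backlog)
 *                      backlog->complete(backlog, -EINPROGRESS);
 *              sample_process(async_req);
 */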

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
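
/*
 * Illustrative sketch, not part of this header: counter-style chaining modes
 * use these helpers to XOR one keystream block into the data and to step the
 * counter, which crypto_inc() treats as a big-endian integer.  Buffer names
 * are hypothetical; all buffers must honour the u32 alignment noted above.
 *
 *      crypto_xor(dst, keystream, bsize);      (dst ^= keystream)
 *      crypto_inc(ctrblk, bsize);              (ctrblk += 1, big-endian)
 */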

int blkcipher_walk_done(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
                              struct blkcipher_walk *walk,
                              unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
                                   struct blkcipher_walk *walk,
                                   struct crypto_aead *tfm,
                                   unsigned int blocksize);
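
/*
 * Illustrative sketch, not part of this header: a blkcipher ->encrypt() or
 * ->decrypt() handler typically initialises a walk over the scatterlists and
 * then loops, processing full blocks from walk.src.virt.addr into
 * walk.dst.virt.addr and handing the unprocessed remainder back to
 * blkcipher_walk_done().  sample_crypt_blocks() is hypothetical and is
 * assumed to return the number of bytes it left unprocessed (< block size).
 *
 *      struct blkcipher_walk walk;
 *      int err;
 *
 *      blkcipher_walk_init(&walk, dst, src, nbytes);
 *      err = blkcipher_walk_virt(desc, &walk);
 *
 *      while (walk.nbytes) {
 *              unsigned int remain = sample_crypt_blocks(desc, &walk);
 *              err = blkcipher_walk_done(desc, &walk, remain);
 *      }
 *      return err;
 */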

int ablkcipher_walk_done(struct ablkcipher_request *req,
                         struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
                         struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
        return PTR_ALIGN(crypto_tfm_ctx(tfm),
                         crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
        struct crypto_tfm *tfm)
{
        return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
        return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
        return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
        struct crypto_aead *aead)
{
        return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
        struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
        struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_CIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}
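
/*
 * Illustrative sketch, not part of this header: an instance's ->cra_init()
 * commonly grabs its underlying cipher from the spawn stored in the instance
 * context and stashes it in the tfm context for use by the other callbacks.
 * struct sample_ctx and its child member are hypothetical.
 *
 *      static int crypto_sample_init_tfm(struct crypto_tfm *tfm)
 *      {
 *              struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 *              struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *              struct sample_ctx *ctx = crypto_tfm_ctx(tfm);
 *              struct crypto_cipher *cipher;
 *
 *              cipher = crypto_spawn_cipher(spawn);
 *              if (IS_ERR(cipher))
 *                      return PTR_ERR(cipher);
 *
 *              ctx->child = cipher;
 *              return 0;
 *      }
 */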

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_HASH;
        u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

        return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
                                       struct scatterlist *dst,
                                       struct scatterlist *src,
                                       unsigned int nbytes)
{
        walk->in.sg = src;
        walk->out.sg = dst;
        walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        walk->in.sg = src;
        walk->out.sg = dst;
        walk->total = nbytes;
        INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
        if (unlikely(!list_empty(&walk->buffers)))
                __ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
        struct crypto_queue *queue)
{
        return queue->backlog == &queue->list ? NULL :
               container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
                                             struct ablkcipher_request *request)
{
        return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
        struct crypto_queue *queue)
{
        return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
        return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
                                          struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
        return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
        req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
        return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
                                                     u32 type, u32 mask)
{
        return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
        return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
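
/*
 * Illustrative sketch, not part of this header: templates fold the result of
 * crypto_requires_sync() into the mask used to look up their inner algorithm,
 * so that a caller asking for a synchronous wrapper also gets a synchronous
 * inner algorithm.  algt is assumed to come from crypto_get_attr_type().
 *
 *      mask = CRYPTO_ALG_TYPE_MASK |
 *             crypto_requires_sync(algt->type, algt->mask);
 *      alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_BLKCIPHER, mask);
 */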

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *                 timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
        return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
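
/*
 * Illustrative sketch, not part of this header: the typical use is comparing
 * a computed authentication tag against the one carried in the message, where
 * an early-exit memcmp() would leak how many leading bytes matched.  The tag
 * buffer names are hypothetical.
 *
 *      if (crypto_memneq(computed_tag, received_tag, authsize))
 *              return -EBADMSG;
 */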

static inline void crypto_yield(u32 flags)
{
        if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
                cond_resched();
}

#endif  /* _CRYPTO_ALGAPI_H */