linux/include/linux/crypto.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)       \
                __MODULE_INFO(alias, alias_userspace, name);    \
                __MODULE_INFO(alias, alias_crypto, "crypto-" name)

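/*
 * Illustrative example: a module implementing the "aes" cipher declares
 *
 *      MODULE_ALIAS_CRYPTO("aes");
 *
 * which emits both the plain "aes" alias (for userspace requests) and the
 * "crypto-aes" alias used for in-kernel module autoloading.
 */
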
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK            0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER          0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS        0x00000002
#define CRYPTO_ALG_TYPE_AEAD            0x00000003
#define CRYPTO_ALG_TYPE_SKCIPHER        0x00000005
#define CRYPTO_ALG_TYPE_KPP             0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS       0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS       0x0000000b
#define CRYPTO_ALG_TYPE_RNG             0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER        0x0000000d
#define CRYPTO_ALG_TYPE_HASH            0x0000000e
#define CRYPTO_ALG_TYPE_SHASH           0x0000000e
#define CRYPTO_ALG_TYPE_AHASH           0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK       0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK      0x0000000e
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK  0x0000000e

#define CRYPTO_ALG_LARVAL               0x00000010
#define CRYPTO_ALG_DEAD                 0x00000020
#define CRYPTO_ALG_DYING                0x00000040
#define CRYPTO_ALG_ASYNC                0x00000080

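/*
 * Illustrative note: during lookup, an algorithm matches a (type, mask)
 * pair roughly when ((alg->cra_flags ^ type) & mask) == 0.  For example,
 * a caller that cannot tolerate asynchronous implementations passes
 * type = 0 and includes CRYPTO_ALG_ASYNC in the mask, as in
 *
 *      tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *
 * (crypto_alloc_skcipher() is declared in <crypto/skcipher.h>.)
 */
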
/*
 * Set if the algorithm (or an algorithm which it uses) requires another
 * algorithm of the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK        0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */
#define CRYPTO_ALG_TESTED               0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE             0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * not directly available to userspace via an instruction set or similar
 * means.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY     0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API.
 */
#define CRYPTO_ALG_INTERNAL             0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY         0x00004000

/*
 * Don't trigger module loading
 */
#define CRYPTO_NOLOAD                   0x00008000

/*
 * The algorithm may allocate memory during request processing, i.e. during
 * encryption, decryption, or hashing.  Users can request an algorithm with this
 * flag unset if they can't handle memory allocation failures.
 *
 * This flag is currently only implemented for algorithms of type "skcipher",
 * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
 * have this flag set even if they allocate memory.
 *
 * In some edge cases, algorithms can allocate memory regardless of this flag.
 * To avoid these cases, users must obey the following usage constraints:
 *    skcipher:
 *      - The IV buffer and all scatterlist elements must be aligned to the
 *        algorithm's alignmask.
 *      - If the data were to be divided into chunks of size
 *        crypto_skcipher_walksize() (with any remainder going at the end), no
 *        chunk can cross a page boundary or a scatterlist element boundary.
 *    aead:
 *      - The IV buffer and all scatterlist elements must be aligned to the
 *        algorithm's alignmask.
 *      - The first scatterlist element must contain all the associated data,
 *        and its pages must be !PageHighMem.
 *      - If the plaintext/ciphertext were to be divided into chunks of size
 *        crypto_aead_walksize() (with the remainder going at the end), no chunk
 *        can cross a page boundary or a scatterlist element boundary.
 *    ahash:
 *      - The result buffer must be aligned to the algorithm's alignmask.
 *      - crypto_ahash_finup() must not be used unless the algorithm implements
 *        ->finup() natively.
 */
#define CRYPTO_ALG_ALLOCATES_MEMORY     0x00010000

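/*
 * Illustrative sketch: to obtain only implementations that never allocate
 * memory while processing a request, leave this flag clear in @type and
 * set it in @mask, e.g.
 *
 *      tfm = crypto_alloc_skcipher("cbc(aes)", 0,
 *                                  CRYPTO_ALG_ALLOCATES_MEMORY);
 *
 * (assuming the skcipher interface from <crypto/skcipher.h>).
 */
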
/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY             0x00000001

#define CRYPTO_TFM_REQ_MASK             0x000fff00
#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS 0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP        0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG      0x00000400

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME             128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  On architectures that support non-cache coherent
 * DMA, such as ARM or arm64, it also takes into account the minimal alignment
 * that is required to ensure that the context struct member does not share any
 * cachelines with the rest of the struct. This is needed to ensure that cache
 * maintenance for non-coherent DMA (cache invalidation in particular) does not
 * affect data that may be accessed by the CPU concurrently.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_async_request;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
        struct list_head list;
        crypto_completion_t complete;
        void *data;
        struct crypto_tfm *tfm;

        u32 flags;
};

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *                   the smallest key length supported by this transformation
 *                   algorithm. This must be set to one of the pre-defined
 *                   values as this is not hardware specific. Possible values
 *                   for this field can be found via git grep "_MIN_KEY_SIZE"
 *                   include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *                  the largest key length supported by this transformation
 *                  algorithm. This must be set to one of the pre-defined values
 *                  as this is not hardware specific. Possible values for this
 *                  field can be found via git grep "_MAX_KEY_SIZE"
 *                  include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *              program a supplied key into the hardware or store the key in the
 *              transformation context for programming it later. Note that this
 *              function does modify the transformation context. This function
 *              can be called multiple times during the existence of the
 *              transformation object, so one must make sure the key is properly
 *              reprogrammed into the hardware. This function is also
 *              responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *               single block of data, which must be @cra_blocksize big. This
 *               always operates on a full @cra_blocksize and it is not possible
 *               to encrypt a block of smaller size. The supplied buffers must
 *               therefore also be at least of @cra_blocksize size. Both the
 *               input and output buffers are always aligned to @cra_alignmask.
 *               In case either the input or the output buffer supplied by a
 *               user of the crypto API is not aligned to @cra_alignmask, the
 *               crypto API will re-align the buffers. The re-alignment means
 *               that a new buffer will be allocated, the data will be copied
 *               into the new buffer, the processing will happen on the new
 *               buffer, then the data will be copied back into the original
 *               buffer and finally the new buffer will be freed. In case a
 *               software fallback was put in place in the @cra_init call, this
 *               function might need to use the fallback if the algorithm
 *               doesn't support all of the key sizes. In case the key was
 *               stored in the transformation context, the key might need to be
 *               re-programmed into the hardware in this function. This function
 *               shall not modify the transformation context, as this function
 *               may be called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *               @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};


/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *                data in the specified buffer. Return the length of the
 *                compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *                  data in the specified buffer. The length of the data is
 *                  returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
        int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                            unsigned int slen, u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                              unsigned int slen, u8 *dst, unsigned int *dlen);
};

#ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:        number of encrypt requests
 * @encrypt_tlen:       total data size handled by encrypt requests
 * @decrypt_cnt:        number of decrypt requests
 * @decrypt_tlen:       total data size handled by decrypt requests
 * @err_cnt:            number of errors for AEAD requests
 */
struct crypto_istat_aead {
        atomic64_t encrypt_cnt;
        atomic64_t encrypt_tlen;
        atomic64_t decrypt_cnt;
        atomic64_t decrypt_tlen;
        atomic64_t err_cnt;
};

/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:        number of encrypt requests
 * @encrypt_tlen:       total data size handled by encrypt requests
 * @decrypt_cnt:        number of decrypt requests
 * @decrypt_tlen:       total data size handled by decrypt requests
 * @verify_cnt:         number of verify operations
 * @sign_cnt:           number of sign requests
 * @err_cnt:            number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
        atomic64_t encrypt_cnt;
        atomic64_t encrypt_tlen;
        atomic64_t decrypt_cnt;
        atomic64_t decrypt_tlen;
        atomic64_t verify_cnt;
        atomic64_t sign_cnt;
        atomic64_t err_cnt;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:        number of encrypt requests
 * @encrypt_tlen:       total data size handled by encrypt requests
 * @decrypt_cnt:        number of decrypt requests
 * @decrypt_tlen:       total data size handled by decrypt requests
 * @err_cnt:            number of errors for cipher requests
 */
struct crypto_istat_cipher {
        atomic64_t encrypt_cnt;
        atomic64_t encrypt_tlen;
        atomic64_t decrypt_cnt;
        atomic64_t decrypt_tlen;
        atomic64_t err_cnt;
};

/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:       number of compress requests
 * @compress_tlen:      total data size handled by compress requests
 * @decompress_cnt:     number of decompress requests
 * @decompress_tlen:    total data size handled by decompress requests
 * @err_cnt:            number of errors for compress requests
 */
struct crypto_istat_compress {
        atomic64_t compress_cnt;
        atomic64_t compress_tlen;
        atomic64_t decompress_cnt;
        atomic64_t decompress_tlen;
        atomic64_t err_cnt;
};

/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:           number of hash requests
 * @hash_tlen:          total data size hashed
 * @err_cnt:            number of errors for hash requests
 */
struct crypto_istat_hash {
        atomic64_t hash_cnt;
        atomic64_t hash_tlen;
        atomic64_t err_cnt;
};

/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:              number of setsecret operations
 * @generate_public_key_cnt:    number of generate_public_key operations
 * @compute_shared_secret_cnt:  number of compute_shared_secret operations
 * @err_cnt:                    number of errors for KPP requests
 */
struct crypto_istat_kpp {
        atomic64_t setsecret_cnt;
        atomic64_t generate_public_key_cnt;
        atomic64_t compute_shared_secret_cnt;
        atomic64_t err_cnt;
};

/*
 * struct crypto_istat_rng - statistics for RNG algorithm
 * @generate_cnt:       number of RNG generate requests
 * @generate_tlen:      total size of data generated by the RNG
 * @seed_cnt:           number of times the RNG was seeded
 * @err_cnt:            number of errors for RNG requests
 */
struct crypto_istat_rng {
        atomic64_t generate_cnt;
        atomic64_t generate_tlen;
        atomic64_t seed_cnt;
        atomic64_t err_cnt;
};
#endif /* CONFIG_CRYPTO_STATS */

#define cra_cipher      cra_u.cipher
#define cra_compress    cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *             CRYPTO_ALG_* flags for the flags which go in here. Those are
 *             used for fine-tuning the description of the transformation
 *             algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *                 of the smallest possible unit which can be transformed with
 *                 this algorithm. The users must respect this value.
 *                 In case of a HASH transformation, it is possible for a
 *                 smaller block than @cra_blocksize to be passed to the crypto
 *                 API for transformation. For any other transformation type,
 *                 an error will be returned upon any attempt to transform
 *                 chunks smaller than @cra_blocksize.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *               value informs the kernel crypto API about the memory size
 *               needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
 *                 buffer containing the input data for the algorithm must be
 *                 aligned to this alignment mask. The data buffer for the
 *                 output data must be aligned to this alignment mask. Note that
 *                 the Crypto API will do the re-alignment in software, but
 *                 only under special conditions and there is a performance hit.
 *                 The re-alignment happens in these cases for the different
 *                 @cra_u types: cipher -- For both input data and output data
 *                 buffer; ahash -- For output hash destination buf; shash --
 *                 For output hash destination buf.
 *                 This is needed on hardware which is flawed by design and
 *                 cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *                multiple transformations with the same @cra_name are available
 *                to the Crypto API, the kernel will use the one with the
 *                highest @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *            transformation algorithm. This is the name of the transformation
 *            itself. This field is used by the kernel when looking up the
 *            providers of a particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *                   name of the provider of the transformation. This can be any
 *                   arbitrary value, but in the usual case, this contains the
 *                   name of the chip or provider and the name of the
 *                   transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *            struct crypto_type, which implements callbacks common for all
 *            transformation types. There are multiple options, such as
 *            &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *            This field might be empty. In that case, there are no common
 *            callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *         multiple structures. Depending on the type of transformation selected
 *         by @cra_type and @cra_flags above, the associated structure must be
 *         filled with callbacks. This field might be empty. This is the case
 *         for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *            is used to initialize the cryptographic transformation object.
 *            This function is called only once at the instantiation time, right
 *            after the transformation context was allocated. In case the
 *            cryptographic hardware has some special requirements which need to
 *            be handled by software, this function shall check for the precise
 *            requirement of the transformation and put any software fallbacks
 *            in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *            counterpart to @cra_init, used to remove various changes set in
 *            @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *                definition. See &struct cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *                  See &struct compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
 * @stats.aead:         statistics for AEAD algorithm
 * @stats.akcipher:     statistics for akcipher algorithm
 * @stats.cipher:       statistics for cipher algorithm
 * @stats.compress:     statistics for compress algorithm
 * @stats.hash:         statistics for hash algorithm
 * @stats.rng:          statistics for rng algorithm
 * @stats.kpp:          statistics for KPP algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
        struct list_head cra_list;
        struct list_head cra_users;

        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        int cra_priority;
        refcount_t cra_refcnt;

        char cra_name[CRYPTO_MAX_ALG_NAME];
        char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        const struct crypto_type *cra_type;

        union {
                struct cipher_alg cipher;
                struct compress_alg compress;
        } cra_u;

        int (*cra_init)(struct crypto_tfm *tfm);
        void (*cra_exit)(struct crypto_tfm *tfm);
        void (*cra_destroy)(struct crypto_alg *alg);

        struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
        union {
                struct crypto_istat_aead aead;
                struct crypto_istat_akcipher akcipher;
                struct crypto_istat_cipher cipher;
                struct crypto_istat_compress compress;
                struct crypto_istat_hash hash;
                struct crypto_istat_rng rng;
                struct crypto_istat_kpp kpp;
        } stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg);
void crypto_stats_get(struct crypto_alg *alg);
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
#else
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
#endif
/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
        struct completion completion;
        int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
        struct crypto_wait _wait = { \
                COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
        switch (err) {
        case -EINPROGRESS:
        case -EBUSY:
                wait_for_completion(&wait->completion);
                reinit_completion(&wait->completion);
                err = wait->err;
                break;
        }

        return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
        init_completion(&wait->completion);
}
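
/*
 * Typical usage (an illustrative sketch; skcipher_request_set_callback()
 * and crypto_skcipher_encrypt() live in <crypto/skcipher.h>):
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *                                    CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                    crypto_req_done, &wait);
 *      err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_req_done() records the final status and completes the wait, so
 * crypto_wait_req() turns an -EINPROGRESS/-EBUSY return into the actual
 * result of the asynchronous operation.
 */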

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

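/*
 * Minimal registration sketch for a hypothetical "noop" single-block
 * cipher (noop_setkey() and noop_crypt() are made-up helper names, shown
 * for illustration only):
 *
 *      static struct crypto_alg noop_alg = {
 *              .cra_name        = "noop",
 *              .cra_driver_name = "noop-generic",
 *              .cra_priority    = 100,
 *              .cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
 *              .cra_blocksize   = 16,
 *              .cra_module      = THIS_MODULE,
 *              .cra_u.cipher    = {
 *                      .cia_min_keysize = 16,
 *                      .cia_max_keysize = 16,
 *                      .cia_setkey      = noop_setkey,
 *                      .cia_encrypt     = noop_crypt,
 *                      .cia_decrypt     = noop_crypt,
 *              },
 *      };
 *
 * It would be registered with crypto_register_alg(&noop_alg), typically
 * from module init, and torn down with crypto_unregister_alg(&noop_alg).
 */
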
/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {
        u32 crt_flags;

        int node;

        void (*exit)(struct crypto_tfm *tfm);

        struct crypto_alg *__crt_alg;

        void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_comp {
        struct crypto_tfm base;
};

enum {
        CRYPTOA_UNSPEC,
        CRYPTOA_ALG,
        CRYPTOA_TYPE,
        CRYPTOA_U32,
        __CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
        u32 type;
        u32 mask;
};

struct crypto_attr_u32 {
        u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
        return crypto_destroy_tfm(tfm, tfm);
}

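/*
 * Illustrative sketch: crypto_alloc_base() returns an ERR_PTR() on failure,
 * so callers check it with IS_ERR()/PTR_ERR() rather than against NULL:
 *
 *      struct crypto_tfm *tfm;
 *
 *      tfm = crypto_alloc_base("sha256", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_tfm(tfm);
 */
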
int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
        return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
        tfm->crt_flags &= ~flags;
}

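/*
 * Illustrative sketch: a user that must reject weak keys (e.g. for DES)
 * can set the request flag before invoking the setkey operation:
 *
 *      crypto_tfm_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
 *
 * and drop it again with crypto_tfm_clear_flags() once the restriction no
 * longer applies.
 */
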
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}

static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
        return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
                                                    u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
        return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
        crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
        type &= ~CRYPTO_ALG_TYPE_MASK;
        type |= CRYPTO_ALG_TYPE_COMPRESS;
        mask |= CRYPTO_ALG_TYPE_MASK;

        return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
        return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
                         const u8 *src, unsigned int slen,
                         u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
                           const u8 *src, unsigned int slen,
                           u8 *dst, unsigned int *dlen);

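/*
 * Illustrative sketch of the compression interface (error handling
 * abbreviated): on entry *dlen holds the destination capacity, and on
 * successful return it holds the length of the compressed output.
 *
 *      struct crypto_comp *comp = crypto_alloc_comp("deflate", 0, 0);
 *      unsigned int dlen = dst_size;
 *
 *      if (IS_ERR(comp))
 *              return PTR_ERR(comp);
 *      err = crypto_comp_compress(comp, src, slen, dst, &dlen);
 *      crypto_free_comp(comp);
 */
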
#endif  /* _LINUX_CRYPTO_H */