linux/crypto/api.c
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
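
/*
 * Illustrative sketch, not part of this file's API (the helper name is
 * hypothetical): callers taking a temporary reference pair
 * crypto_mod_get() with crypto_mod_put(), since a successful get pins
 * both the algorithm refcount and its owning module.
 */
static inline int crypto_api_example_hold(struct crypto_alg *alg)
{
	if (!crypto_mod_get(alg))
		return -EAGAIN;	/* algorithm or its module is going away */

	/* ... alg may be used safely here ... */

	crypto_mod_put(alg);
	return 0;
}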

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	/* Only larvals created for algorithm testing carry a driver name. */
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		/* Reject algorithms whose type bits differ from @type in
		 * any position selected by @mask. */
		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		/* An exact driver-name match wins outright; otherwise the
		 * highest-priority algorithm with a matching generic name
		 * is chosen. */
		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
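
/*
 * Worked example of the matching rules above (driver names and
 * priorities are illustrative): with "aes-generic" (cra_priority 100)
 * and "aes-asm" (cra_priority 200) both registered under cra_name
 * "aes", looking up "aes" returns aes-asm, the best fuzzy match by
 * priority, while looking up "aes-generic" returns that algorithm the
 * moment the exact driver-name match is seen.
 */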

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	/* One reference for the list, one for the caller. */
	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	/* Lost the race: someone else registered this name first. */
	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	/* Give the algorithm up to 60 seconds to materialise. */
	timeout = wait_for_completion_interruptible_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("%s", name);

		/*
		 * Unless the caller explicitly excluded algorithms that
		 * themselves need a fallback, also try the "<name>-all"
		 * module alias.
		 */
		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		/* No listener yet; pull in the crypto manager and retry. */
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);
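
/*
 * Illustrative sketch, modelled on the in-tree crypto manager
 * (crypto/cryptomgr.c); the names below are hypothetical.  A listener
 * on crypto_chain reacts to CRYPTO_MSG_ALG_REQUEST by trying to
 * construct the algorithm named by the larval passed in @data.
 */
static int example_cryptomgr_notify(struct notifier_block *this,
				    unsigned long msg, void *data)
{
	switch (msg) {
	case CRYPTO_MSG_ALG_REQUEST:
		/* struct crypto_larval *larval = data; schedule a probe
		 * for larval->alg.cra_name here. */
		return NOTIFY_STOP;
	}
	return NOTIFY_DONE;
}

static struct notifier_block example_cryptomgr_notifier = {
	.notifier_call = example_cryptomgr_notify,
};

/* Registered at init time with crypto_register_notifier(). */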

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * Unless the caller constrains CRYPTO_ALG_TESTED itself, accept
	 * only algorithms that have passed their self-tests.
	 */
	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}
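
/*
 * Worked example of the alignment slack computed above (values are
 * illustrative): with cra_alignmask = 15 (16-byte alignment wanted) and
 * crypto_tfm_ctx_alignment() = 8, len starts at 15 & ~7 = 8 spare
 * bytes, which is exactly enough to move an 8-byte-aligned context
 * pointer up to the next 16-byte boundary.
 */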

void crypto_shoot_alg(struct crypto_alg *alg)
{
	/* Mark the algorithm as dying so crypto_is_moribund() skips it. */
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
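
/*
 * Illustrative sketch (the function name and algorithm are examples
 * only): allocating a transform by name and releasing it with
 * crypto_free_tfm(), the <linux/crypto.h> wrapper around
 * crypto_destroy_tfm() below.
 */
static inline int crypto_api_example_base(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);	/* e.g. -ENOENT if nothing provides it */

	/* ... drive the transform through a type-specific wrapper ... */

	crypto_free_tfm(tfm);
	return 0;
}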

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	/* The frontend's private data comes first; the generic tfm and
	 * the algorithm context follow it. */
	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
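
/*
 * Sketch of how a type-safe frontend wraps crypto_alloc_tfm(), modelled
 * on crypto_alloc_shash() in crypto/shash.c, where a struct crypto_type
 * instance supplies tfmsize, extsize and init_tfm:
 *
 *	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
 */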

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm allocation
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
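
/*
 * Note on the two arguments above: @mem is the start of the allocation
 * returned by crypto_create_tfm() (frontend context first), while @tfm
 * may point into the middle of it.  For transforms from
 * crypto_alloc_base() the two coincide, which the crypto_free_tfm()
 * wrapper in <linux/crypto.h> relies on:
 *
 *	crypto_destroy_tfm(tfm, tfm);
 */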

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
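
/*
 * Illustrative sketch (hypothetical init function and algorithm name):
 * probing for an optional algorithm before enabling a feature.  The
 * lookup may load modules or ask the crypto manager to construct the
 * algorithm, so it can sleep.
 */
static inline int example_feature_init(void)
{
	if (!crypto_has_alg("hmac(sha256)", 0, 0))
		return -ENOENT;

	/* The algorithm is available; it is safe to allocate it later. */
	return 0;
}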

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");