linux/crypto/api.c
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
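
/*
 * A minimal usage sketch (not part of this file): every successful
 * crypto_mod_get() must be balanced by a crypto_mod_put(), which drops
 * the algorithm reference before the module reference.
 *
 *      if (!crypto_mod_get(alg))
 *              return -EAGAIN;
 *      ... use alg ...
 *      crypto_mod_put(alg);
 */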

/* A test larval is distinguished by having its driver name filled in. */
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

/*
 * Look up an algorithm by name.  The caller must hold crypto_alg_sem.
 * An exact cra_driver_name match wins outright; otherwise the
 * highest-priority cra_name match is returned.
 */
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
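
/*
 * For illustration (priorities here are hypothetical): with
 * "sha1-generic" (cra_name "sha1", priority 100) and "sha1-ssse3"
 * (cra_name "sha1", priority 150) both registered, looking up "sha1"
 * returns sha1-ssse3 because its priority is higher, while looking up
 * "sha1-generic" matches a driver name exactly and returns it
 * regardless of priority.
 */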

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

/*
 * Wait up to 60 seconds for a larval to mature into a real algorithm,
 * dropping the larval reference on the way out.
 */
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        u32 test = 0;

        if (!((type | mask) & CRYPTO_ALG_TESTED))
                test |= CRYPTO_ALG_TESTED;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type | test, mask | test);
        if (!alg && test) {
                alg = __crypto_alg_lookup(name, type, mask);
                if (alg && !crypto_is_larval(alg)) {
                        /* Test failed */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ELIBBAD);
                }
        }
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);
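
/*
 * A worked example of the flag test in __crypto_alg_lookup():
 * (q->cra_flags ^ type) & mask rejects q whenever a bit selected by
 * mask differs between q->cra_flags and type.  When the caller sets
 * CRYPTO_ALG_TESTED in neither type nor mask, the lookup above first
 * ORs that bit into both, so only algorithms whose cra_flags carry
 * CRYPTO_ALG_TESTED (i.e. that passed self-tests) can match on the
 * first pass.
 */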

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
                alg = crypto_larval_wait(alg);
        else if (!alg)
                alg = crypto_larval_add(name, type, mask);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);
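
/*
 * The "crypto-%s" module request above relies on provider modules
 * declaring a matching alias, typically via e.g.:
 *
 *      MODULE_ALIAS_CRYPTO("aes");
 *
 * which lets request_module("crypto-aes") load the module on demand.
 */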

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /*
         * If the internal flag is set for a cipher, require a caller
         * to invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
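
/*
 * For illustration: a caller that explicitly wants an internal-only
 * implementation (one hidden from general allocation) requests it with
 * the flag in both type and mask, e.g.:
 *
 *      alg = crypto_alg_mod_lookup(name,
 *                                  type | CRYPTO_ALG_INTERNAL,
 *                                  mask | CRYPTO_ALG_INTERNAL);
 *
 * whereas leaving CRYPTO_ALG_INTERNAL out of both makes the code above
 * add it to the mask, excluding internal ciphers from the search.
 */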

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (tfm->exit)
                        tfm->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}
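
/*
 * Worked example for the alignment padding above (numbers are
 * illustrative): with cra_alignmask == 15 (16-byte alignment) and
 * crypto_tfm_ctx_alignment() == 8, len starts at 15 & ~7 == 8, i.e.
 * 8 spare bytes, enough to round an 8-byte-aligned context pointer up
 * to the next 16-byte boundary.
 */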

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *      crypto_alloc_base - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      This function should not be used by new algorithm types.
 *      Please use crypto_alloc_tfm instead.
 *
 *      crypto_alloc_base() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of an indeterminate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
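
/*
 * A minimal usage sketch (not part of this file):
 *
 *      struct crypto_tfm *tfm = crypto_alloc_base("sha1", 0, 0);
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ... use the transform ...
 *      crypto_free_tfm(tfm);
 */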

void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
                crypto_alg_mod_lookup;

        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;

                if (frontend->lookup)
                        lookup = frontend->lookup;
        }

        return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *      crypto_alloc_tfm - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @frontend: Frontend algorithm type
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      crypto_alloc_tfm() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of an indeterminate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
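
/*
 * For illustration: type-specific allocators are thin wrappers around
 * this function.  The shash frontend, for instance, does roughly:
 *
 *      struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *                                              u32 type, u32 mask)
 *      {
 *              return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *                                      type, mask);
 *      }
 */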

/*
 *      crypto_destroy_tfm - Free crypto transform
 *      @mem: Start of tfm slab
 *      @tfm: Transform to free
 *
 *      This function frees up the transform and any associated resources,
 *      then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
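
/*
 * A minimal sketch: probe for an implementation before relying on it.
 *
 *      if (!crypto_has_alg("gcm(aes)", 0, 0))
 *              return -ENOENT;
 */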

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");