linux/crypto/api.c
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
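
/*
 * Minimal usage sketch (illustrative, not from the original file): code
 * that caches a crypto_alg pointer must pin both the algorithm refcount
 * and the owning module, which is exactly what the pair above provides.
 * "some_alg" is a placeholder for a pointer observed under crypto_alg_sem.
 *
 *	struct crypto_alg *alg = crypto_mod_get(some_alg);
 *
 *	if (!alg)
 *		return -ENOENT;
 *	...
 *	crypto_mod_put(alg);
 */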

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
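
/*
 * Matching above is two-tier (callers hold crypto_alg_sem): the driver
 * name (cra_driver_name, e.g. "sha1-generic") matches exactly and wins
 * immediately, while the generic name (cra_name, e.g. "sha1") is a
 * "fuzzy" match decided by the highest cra_priority seen so far.
 * Illustrative calls:
 *
 *	__crypto_alg_lookup("sha1", 0, 0);
 *		selects the highest-priority "sha1" implementation
 *	__crypto_alg_lookup("sha1-generic", 0, 0);
 *		selects exactly the generic C implementation
 */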

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}
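
/*
 * Larval lifecycle as implemented above: crypto_larval_alloc() builds a
 * placeholder algorithm with a completion to block on;
 * crypto_larval_add() takes two references (list + caller) and publishes
 * it under crypto_alg_sem, unless a matching entry raced in first;
 * crypto_larval_wait() then blocks for up to 60 seconds until ->adult
 * materialises; and crypto_larval_kill() unlinks the larval and wakes
 * all waiters, who observe either the adult algorithm or an error.
 */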

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);
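
/*
 * The request_module("crypto-%s") call above resolves through module
 * aliases; a module typically declares one per algorithm name it
 * provides, along the lines of (sketch):
 *
 *	MODULE_ALIAS_CRYPTO("sha1");
 *	MODULE_ALIAS_CRYPTO("sha1-generic");
 *
 * Implementations that themselves need a software fallback register
 * under the "-all" suffix instead, so they are only autoloaded by the
 * second request, which is skipped when the caller insists on
 * implementations without CRYPTO_ALG_NEED_FALLBACK.
 */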

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
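
/*
 * Illustrative type/mask combinations under the rules above (sketch):
 *
 *	crypto_alg_mod_lookup("aes", 0, 0);
 *		tested, non-internal implementations only
 *	crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL,
 *			      CRYPTO_ALG_INTERNAL);
 *		internal implementations only
 *	crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
 *		either kind, for callers prepared to handle both
 */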

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}
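
/*
 * Worked example for the alignment arithmetic above (illustrative):
 * with cra_alignmask == 63 (64-byte alignment) and
 * crypto_tfm_ctx_alignment() == 8, len starts at 63 & ~7 == 56, i.e.
 * enough slack for the context to be rounded up to a 64-byte boundary
 * regardless of where the 8-byte-aligned allocation lands.
 */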

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
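
/*
 * Marking an algorithm dying makes crypto_is_moribund() true, so later
 * lookups skip it.  This is used below when initialisation fails with
 * -EAGAIN: the retry loops in crypto_alloc_base() and crypto_alloc_tfm()
 * look the name up again and can pick up (or construct) a replacement.
 */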

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
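
/*
 * Minimal usage sketch (illustrative; "cbc(aes)" is just an example
 * name, and real callers should prefer the type-safe allocators):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */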

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
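
/*
 * The type-safe allocators are thin wrappers around this function; the
 * shash frontend, for instance, reduces to roughly (sketch, cf.
 * crypto/shash.c):
 *
 *	struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *						u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *					type, mask);
 *	}
 */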

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
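
/*
 * Callers normally reach this through an inline wrapper, e.g. (sketch,
 * cf. linux/crypto.h):
 *
 *	static inline void crypto_free_tfm(struct crypto_tfm *tfm)
 *	{
 *		return crypto_destroy_tfm(tfm, tfm);
 *	}
 *
 * Frontend-allocated tfms instead pass the start of the enclosing
 * allocation as @mem so the frontend header ahead of the tfm is freed
 * along with it.
 */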

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
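
/*
 * Usage sketch (illustrative): probe for an algorithm before depending
 * on it, using the same type/mask conventions as the allocators:
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */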

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");