linux/crypto/api.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

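/*
 * Registry state: crypto_alg_list holds every registered algorithm,
 * including in-progress "larval" placeholders, and is protected by
 * crypto_alg_sem.  crypto_chain lets listeners such as cryptomgr
 * react to lookup requests, e.g. by instantiating templates.
 */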
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

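/*
 * Pin an algorithm: take a reference on the providing module and on
 * the algorithm object itself.  Returns NULL if the module is being
 * unloaded.  Balance with crypto_mod_put().
 */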
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

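/*
 * Larvals created by the crypto manager for testing have
 * cra_driver_name filled in; larvals created by a plain lookup leave
 * it empty (crypto_larval_alloc() only copies cra_name).
 */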
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

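/*
 * Scan the registry for @name; the caller must hold crypto_alg_sem.
 * An exact cra_driver_name match wins outright; otherwise the highest
 * cra_priority among cra_name matches is returned, with a module
 * reference already taken via crypto_mod_get().
 */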
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

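/*
 * Publish a larval for @name so that concurrent lookups coalesce on a
 * single request.  The refcount of 2 covers the registry's reference
 * and the caller's.  If someone beat us to it, the duplicate larval
 * is freed and we wait on (or return) the existing entry instead.
 */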
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        refcount_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

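/*
 * Sleep (killably, with a 60 second timeout) until the larval matures
 * or is killed.  Consumes the caller's larval reference and returns
 * either a referenced adult algorithm or an ERR_PTR.  A test larval
 * whose adult has not passed testing yet yields -EAGAIN so the caller
 * retries the lookup.
 */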
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

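/*
 * Look up @name, preferring algorithms that have passed their
 * self-tests.  Unless the caller explicitly asked about
 * CRYPTO_ALG_TESTED, search tested algorithms first; finding only an
 * untested non-larval entry means the self-test failed, which is
 * reported as -ELIBBAD.
 */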
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        u32 test = 0;

        if (!((type | mask) & CRYPTO_ALG_TESTED))
                test |= CRYPTO_ALG_TESTED;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type | test, mask | test);
        if (!alg && test) {
                alg = __crypto_alg_lookup(name, type, mask);
                if (alg && !crypto_is_larval(alg)) {
                        /* Test failed */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ELIBBAD);
                }
        }
        up_read(&crypto_alg_sem);

        return alg;
}

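/*
 * As crypto_alg_lookup(), but may also load modules: first via the
 * "crypto-<name>" alias, then via "crypto-<name>-all" unless the
 * caller excluded algorithms that themselves need a fallback.  If
 * nothing is found, register a larval so the crypto manager gets a
 * chance to construct the algorithm on the fly.
 */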
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
                                               u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg && !(mask & CRYPTO_NOLOAD)) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
                alg = crypto_larval_wait(alg);
        else if (!alg)
                alg = crypto_larval_add(name, type, mask);

        return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

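/*
 * Top-level lookup.  If only a larval comes back, ask the crypto
 * manager (via CRYPTO_MSG_ALG_REQUEST) to instantiate the algorithm,
 * wait for the result, and finally kill the larval.
 */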
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /*
         * If the internal flag is set for a cipher, require a caller to
         * invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

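/*
 * Size of the per-tfm context: worst-case padding needed to honour
 * cra_alignmask on top of the natural crypto_tfm_ctx_alignment(),
 * plus the type-specific context size.
 */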
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

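/*
 * Mark an algorithm as dying, typically after an -EAGAIN from tfm
 * initialisation, so that subsequent lookups skip it (it becomes
 * moribund) and trigger a fresh instantiation.
 */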
void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

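/*
 * Allocate a transform for @alg: one allocation holding the tfm plus
 * its context, then run the type's init ops and, if the type did not
 * install an exit handler, the algorithm's cra_init.  On -EAGAIN the
 * algorithm is shot so the caller's retry picks a replacement.
 */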
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *      crypto_alloc_base - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      This function should not be used by new algorithm types.
 *      Please use crypto_alloc_tfm instead.
 *
 *      crypto_alloc_base() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of a non-determinate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
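
/*
 * Illustrative sketch only (not part of this file): allocating a
 * single-block cipher tfm the way crypto_alloc_cipher() does, assuming
 * the "aes" cipher is available and the caller runs in process
 * context:
 *
 *      struct crypto_tfm *tfm;
 *
 *      tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *                              CRYPTO_ALG_TYPE_MASK);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      ...
 *      crypto_free_tfm(tfm);
 */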

void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

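/*
 * Frontend-aware lookup: apply the frontend's mandatory type and mask
 * bits (after clearing the bits it owns) and then fall through to
 * crypto_alg_mod_lookup().
 */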
struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;
        }

        return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *      crypto_alloc_tfm - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @frontend: Frontend algorithm type
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      crypto_alloc_tfm() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of a non-determinate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

/*
 *      crypto_destroy_tfm - Free crypto transform
 *      @mem: Start of tfm slab
 *      @tfm: Transform to free
 *
 *      This function frees up the transform and any associated resources,
 *      then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

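/*
 * Non-allocating availability check.  This still goes through the
 * full lookup machinery, so it may load modules and wait for the
 * crypto manager; it returns 1 if the algorithm exists, else 0.
 */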
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

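/*
 * Generic completion callback for async requests driven through a
 * struct crypto_wait.  -EINPROGRESS only signals that a backlogged
 * request has started, so it is ignored here; any other status wakes
 * the waiter.  Sketch of the usual pattern (illustrative; assumes an
 * skcipher request @req that is already set up):
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                    crypto_req_done, &wait);
 *      err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */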
void crypto_req_done(struct crypto_async_request *req, int err)
{
        struct crypto_wait *wait = req->data;

        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");