linux/crypto/ctr.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

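/*
 * Handle the final, partial block: encrypt the counter once into an
 * aligned keystream buffer and XOR only the remaining walk->nbytes
 * bytes into the destination.
 */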
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

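/*
 * Process full blocks when source and destination buffers differ: the
 * keystream is encrypted directly into dst and then XORed with src.
 * Returns the number of trailing bytes (less than one block) left for
 * the caller to handle.
 */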
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

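/*
 * Process full blocks in place: since src and dst are the same buffer,
 * the keystream is generated into an aligned temporary buffer first.
 * Returns the number of trailing bytes (less than one block) left for
 * the caller to handle.
 */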
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

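/*
 * Walk the request and process all full blocks, then handle any
 * trailing partial block. CTR encryption and decryption are the same
 * operation, so this routine serves as both.
 */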
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

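/* Instantiate "ctr(cipher)" as a stream cipher wrapping a block cipher. */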
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto out_free_inst;
	goto out_put_alg;

out_free_inst:
	inst->free(inst);
out_put_alg:
	crypto_mod_put(alg);
	return err;
}

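/*
 * The RFC 3686 key is the underlying cipher key followed by a 4-byte
 * nonce; strip the nonce off and pass the remainder to the child tfm.
 */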
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored at the end of the key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

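/*
 * Build the 16-byte counter block as nonce (4 bytes) || per-request IV
 * (8 bytes) || 32-bit big-endian block counter initialized to 1, then
 * forward the request to the child ctr(cipher) transform.
 */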
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

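/*
 * Allocate the child skcipher and size the request context so that the
 * aligned counter block and the child's sub-request both fit in it.
 */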
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

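/*
 * Instantiate "rfc3686(...)" around a counter-mode skcipher (typically
 * "ctr(aes)"): the key gains a trailing 4-byte nonce and each request
 * uses an 8-byte IV, per RFC 3686.
 */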
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* The underlying skcipher must use a 16-byte counter block (IV). */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");