linux/arch/x86/crypto/cast6_avx_glue.c
/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8

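/*
 * 8-way parallel cipher routines provided by the AVX assembler
 * implementation (CAST6_PARALLEL_BLOCKS blocks per call).
 */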
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
                               le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);

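/*
 * Single-block XTS helpers used by the glue code for trailing blocks that
 * cannot be handled by the 8-way assembler routines.
 */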
static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(__cast6_decrypt));
}

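/*
 * Single-block CTR fallback: encrypt the current counter block, XOR it
 * into the data, and advance the little-endian counter for the next block.
 */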
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        be128 ctrblk;

        le128_to_be128(&ctrblk, iv);
        le128_inc(iv);

        __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
        u128_xor(dst, src, (u128 *)&ctrblk);
}

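/*
 * Dispatch tables for the common glue helper: the 8-way AVX routine is
 * tried first, and the one-block implementation handles the remainder.
 * fpu_blocks_limit sets the minimum number of blocks for which the FPU
 * context is saved and the vector unit used.
 */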
static const struct common_glue_ctx cast6_enc = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
        } }
};

static const struct common_glue_ctx cast6_ctr = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
        } }
};

static const struct common_glue_ctx cast6_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
        } }
};

static const struct common_glue_ctx cast6_dec = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
        } }
};

static const struct common_glue_ctx cast6_dec_cbc = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
        } }
};

static const struct common_glue_ctx cast6_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
        } }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);
}

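/*
 * CBC encryption is inherently serial (each block depends on the previous
 * ciphertext), so only the one-block C implementation is used here;
 * decryption is parallelized through cast6_cbc_dec_8way.
 */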
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc,
                                       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src,
                                       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}

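/*
 * Wrappers around glue_fpu_begin()/glue_fpu_end(): the FPU/SIMD state is
 * only saved and restored once at least CAST6_PARALLEL_BLOCKS blocks of
 * data are pending, so short requests avoid the overhead.
 */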
static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
        return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
                              NULL, fpu_enabled, nbytes);
}

static inline void cast6_fpu_end(bool fpu_enabled)
{
        glue_fpu_end(fpu_enabled);
}

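/*
 * Per-request state for the LRW callbacks below: the CAST6 key context plus
 * the current FPU state, so the callbacks can enable the FPU lazily and use
 * the 8-way AVX routines for full chunks.
 */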
struct crypt_priv {
        struct cast6_ctx *ctx;
        bool fpu_enabled;
};

static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = CAST6_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
                cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                __cast6_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = CAST6_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
                cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                __cast6_decrypt(ctx->ctx, srcdst, srcdst);
}

struct cast6_lrw_ctx {
        struct lrw_table_ctx lrw_table;
        struct cast6_ctx cast6_ctx;
};

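/*
 * For lrw(cast6) the last CAST6_BLOCK_SIZE bytes of the key are the LRW
 * tweak key; the remaining bytes are the CAST6 cipher key.
 */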
static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
        int err;

        err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
                             &tfm->crt_flags);
        if (err)
                return err;

        return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[CAST6_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->cast6_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = encrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        cast6_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[CAST6_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->cast6_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = decrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        cast6_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
        struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

        lrw_free_table(&ctx->lrw_table);
}

struct cast6_xts_ctx {
        struct cast6_ctx tweak_ctx;
        struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int err;

        /* key consists of keys of equal size concatenated, therefore
         * the length must be even
         */
        if (keylen % 2) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        /* first half of xts-key is for crypt */
        err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
                              flags);
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&cast6_enc_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(__cast6_encrypt),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&cast6_dec_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(__cast6_encrypt),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

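/*
 * The first five entries are the internal synchronous helpers ("__"
 * prefixed, priority 0) that do the actual SIMD work; the last five are the
 * asynchronous ablkcipher wrappers (priority 200) that users of the crypto
 * API see and that defer to the helpers via the ablk_helper/cryptd
 * machinery.
 */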
static struct crypto_alg cast6_algs[10] = { {
        .cra_name               = "__ecb-cast6-avx",
        .cra_driver_name        = "__driver-ecb-cast6-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct cast6_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE,
                        .setkey         = cast6_setkey,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "__cbc-cast6-avx",
        .cra_driver_name        = "__driver-cbc-cast6-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct cast6_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE,
                        .setkey         = cast6_setkey,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}, {
        .cra_name               = "__ctr-cast6-avx",
        .cra_driver_name        = "__driver-ctr-cast6-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct cast6_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = cast6_setkey,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
}, {
        .cra_name               = "__lrw-cast6-avx",
        .cra_driver_name        = "__driver-lrw-cast6-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct cast6_lrw_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_exit               = lrw_exit_tfm,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE +
                                          CAST6_BLOCK_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE +
                                          CAST6_BLOCK_SIZE,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = lrw_cast6_setkey,
                        .encrypt        = lrw_encrypt,
                        .decrypt        = lrw_decrypt,
                },
        },
}, {
        .cra_name               = "__xts-cast6-avx",
        .cra_driver_name        = "__driver-xts-cast6-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct cast6_xts_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE * 2,
                        .max_keysize    = CAST6_MAX_KEY_SIZE * 2,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = xts_cast6_setkey,
                        .encrypt        = xts_encrypt,
                        .decrypt        = xts_decrypt,
                },
        },
}, {
        .cra_name               = "ecb(cast6)",
        .cra_driver_name        = "ecb-cast6-avx",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(cast6)",
        .cra_driver_name        = "cbc-cast6-avx",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = __ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
}, {
        .cra_name               = "ctr(cast6)",
        .cra_driver_name        = "ctr-cast6-avx",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_encrypt,
                        .geniv          = "chainiv",
                },
        },
}, {
        .cra_name               = "lrw(cast6)",
        .cra_driver_name        = "lrw-cast6-avx",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE +
                                          CAST6_BLOCK_SIZE,
                        .max_keysize    = CAST6_MAX_KEY_SIZE +
                                          CAST6_BLOCK_SIZE,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
}, {
        .cra_name               = "xts(cast6)",
        .cra_driver_name        = "xts-cast6-avx",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = CAST6_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = CAST6_MIN_KEY_SIZE * 2,
                        .max_keysize    = CAST6_MAX_KEY_SIZE * 2,
                        .ivsize         = CAST6_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
} };

static int __init cast6_init(void)
{
        u64 xcr0;

        if (!cpu_has_avx || !cpu_has_osxsave) {
                pr_info("AVX instructions are not detected.\n");
                return -ENODEV;
        }

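        /*
         * The OS must have enabled both SSE and YMM state saving in XCR0,
         * otherwise the AVX registers cannot be used.
         */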
        xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
        if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
                pr_info("AVX detected but unusable.\n");
                return -ENODEV;
        }

        return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

static void __exit cast6_exit(void)
{
        crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("cast6");