linux/drivers/crypto/geode-aes.c
// SPDX-License-Identifier: GPL-2.0-or-later
 /* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
  */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>

#include <linux/io.h>
#include <linux/delay.h>

#include "geode-aes.h"

/* Static structures */

static void __iomem *_iobase;
static DEFINE_SPINLOCK(lock);

/* Write a 128 bit field (either a writable key or IV) */
static inline void
_writefield(u32 offset, const void *value)
{
        int i;

        for (i = 0; i < 4; i++)
                iowrite32(((const u32 *) value)[i], _iobase + offset + (i * 4));
}

/* Read a 128 bit field (either a writable key or IV) */
static inline void
_readfield(u32 offset, void *value)
{
        int i;

        for (i = 0; i < 4; i++)
                ((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
}

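/*
 * Program one AES operation into the engine and poll for completion.
 * The engine DMAs straight from/to the physical addresses of src and
 * dst, so callers must hand in linearly mapped buffers.  Returns 0 on
 * completion and 1 if the engine never raised AES_INTRA_PENDING within
 * AES_OP_TIMEOUT polls.
 */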
static int
do_crypt(const void *src, void *dst, u32 len, u32 flags)
{
        u32 status;
        u32 counter = AES_OP_TIMEOUT;

        iowrite32(virt_to_phys((void *)src), _iobase + AES_SOURCEA_REG);
        iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
        iowrite32(len, _iobase + AES_LENA_REG);

        /* Start the operation */
        iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

        do {
                status = ioread32(_iobase + AES_INTR_REG);
                cpu_relax();
        } while (!(status & AES_INTRA_PENDING) && --counter);

        /* Clear the event */
        iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
        return counter ? 0 : 1;
}

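/*
 * Perform one hardware AES operation: load the IV (CBC only) and the
 * 128-bit key into the engine, run do_crypt(), then read the updated IV
 * back for CBC chaining.  The spinlock serializes all users of the
 * single AES engine; a do_crypt() timeout is treated as fatal (BUG_ON).
 */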
static void
geode_aes_crypt(const struct geode_aes_tfm_ctx *tctx, const void *src,
                void *dst, u32 len, u8 *iv, int mode, int dir)
{
        u32 flags = 0;
        unsigned long iflags;
        int ret;

        /* If the source and destination are the same, we need to
         * turn on the coherent flags; otherwise we don't need to
         * worry.
         */

        flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

        if (dir == AES_DIR_ENCRYPT)
                flags |= AES_CTRL_ENCRYPT;

        /* Start the critical section */

        spin_lock_irqsave(&lock, iflags);

        if (mode == AES_MODE_CBC) {
                flags |= AES_CTRL_CBC;
                _writefield(AES_WRITEIV0_REG, iv);
        }

        flags |= AES_CTRL_WRKEY;
        _writefield(AES_WRITEKEY0_REG, tctx->key);

        ret = do_crypt(src, dst, len, flags);
        BUG_ON(ret);

        if (mode == AES_MODE_CBC)
                _readfield(AES_WRITEIV0_REG, iv);

        spin_unlock_irqrestore(&lock, iflags);
}

/* CRYPTO-API Functions */

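/*
 * The hardware only supports 128-bit keys; geode_setkey_cip() and
 * geode_setkey_skcipher() route 192- and 256-bit keys to the software
 * fallback transforms instead.
 */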
static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
                unsigned int len)
{
        struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        tctx->keylen = len;

        if (len == AES_KEYSIZE_128) {
                memcpy(tctx->key, key, len);
                return 0;
        }

        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256)
                /* not supported at all */
                return -EINVAL;

        /*
         * The requested key size is not supported by HW, do a fallback
         */
        tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
        tctx->fallback.cip->base.crt_flags |=
                (tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

        return crypto_cipher_setkey(tctx->fallback.cip, key, len);
}

static int geode_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
                                 unsigned int len)
{
        struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

        tctx->keylen = len;

        if (len == AES_KEYSIZE_128) {
                memcpy(tctx->key, key, len);
                return 0;
        }

        if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256)
                /* not supported at all */
                return -EINVAL;

        /*
         * The requested key size is not supported by HW, do a fallback
         */
        crypto_skcipher_clear_flags(tctx->fallback.skcipher,
                                    CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(tctx->fallback.skcipher,
                                  crypto_skcipher_get_flags(tfm) &
                                  CRYPTO_TFM_REQ_MASK);
        return crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);
}

static void
geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
                crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);
                return;
        }

        geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
                        AES_MODE_ECB, AES_DIR_ENCRYPT);
}


static void
geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
                crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);
                return;
        }

        geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
                        AES_MODE_ECB, AES_DIR_DECRYPT);
}

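/*
 * Allocate the software cipher used whenever the key size is one the
 * hardware cannot handle; it is freed again in fallback_exit_cip().
 */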
static int fallback_init_cip(struct crypto_tfm *tfm)
{
        const char *name = crypto_tfm_alg_name(tfm);
        struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        tctx->fallback.cip = crypto_alloc_cipher(name, 0,
                                                 CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(tctx->fallback.cip)) {
                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
                return PTR_ERR(tctx->fallback.cip);
        }

        return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
        struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

        crypto_free_cipher(tctx->fallback.cip);
}

static struct crypto_alg geode_alg = {
        .cra_name               =       "aes",
        .cra_driver_name        =       "geode-aes",
        .cra_priority           =       300,
        .cra_alignmask          =       15,
        .cra_flags              =       CRYPTO_ALG_TYPE_CIPHER |
                                        CRYPTO_ALG_NEED_FALLBACK,
        .cra_init               =       fallback_init_cip,
        .cra_exit               =       fallback_exit_cip,
        .cra_blocksize          =       AES_BLOCK_SIZE,
        .cra_ctxsize            =       sizeof(struct geode_aes_tfm_ctx),
        .cra_module             =       THIS_MODULE,
        .cra_u                  =       {
                .cipher =       {
                        .cia_min_keysize        =       AES_MIN_KEY_SIZE,
                        .cia_max_keysize        =       AES_MAX_KEY_SIZE,
                        .cia_setkey             =       geode_setkey_cip,
                        .cia_encrypt            =       geode_encrypt,
                        .cia_decrypt            =       geode_decrypt
                }
        }
};

static int geode_init_skcipher(struct crypto_skcipher *tfm)
{
        const char *name = crypto_tfm_alg_name(&tfm->base);
        struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

        tctx->fallback.skcipher =
                crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK |
                                      CRYPTO_ALG_ASYNC);
        if (IS_ERR(tctx->fallback.skcipher)) {
                printk(KERN_ERR "Error allocating fallback algo %s\n", name);
                return PTR_ERR(tctx->fallback.skcipher);
        }

        crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
                                    crypto_skcipher_reqsize(tctx->fallback.skcipher));
        return 0;
}

static void geode_exit_skcipher(struct crypto_skcipher *tfm)
{
        struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

        crypto_free_skcipher(tctx->fallback.skcipher);
}

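/*
 * Common ECB/CBC path.  Keys other than 128 bits are forwarded to the
 * software fallback via a sub-request; otherwise the data is walked and
 * fed to the engine in full AES blocks, with any partial tail handed
 * back to skcipher_walk_done().
 */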
static int geode_skcipher_crypt(struct skcipher_request *req, int mode, int dir)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                *subreq = *req;
                skcipher_request_set_tfm(subreq, tctx->fallback.skcipher);
                if (dir == AES_DIR_DECRYPT)
                        return crypto_skcipher_decrypt(subreq);
                else
                        return crypto_skcipher_encrypt(subreq);
        }

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) != 0) {
                geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,
                                round_down(nbytes, AES_BLOCK_SIZE),
                                walk.iv, mode, dir);
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }

        return err;
}

static int geode_cbc_encrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_ENCRYPT);
}

static int geode_cbc_decrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_DECRYPT);
}

static int geode_ecb_encrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static int geode_ecb_decrypt(struct skcipher_request *req)
{
        return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_DECRYPT);
}

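/*
 * Note: cra_alignmask of 15 asks the crypto API for 16-byte aligned
 * buffers, presumably to match the engine's 128-bit transfers.
 */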
static struct skcipher_alg geode_skcipher_algs[] = {
        {
                .base.cra_name          = "cbc(aes)",
                .base.cra_driver_name   = "cbc-aes-geode",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_KERN_DRIVER_ONLY |
                                          CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct geode_aes_tfm_ctx),
                .base.cra_alignmask     = 15,
                .base.cra_module        = THIS_MODULE,
                .init                   = geode_init_skcipher,
                .exit                   = geode_exit_skcipher,
                .setkey                 = geode_setkey_skcipher,
                .encrypt                = geode_cbc_encrypt,
                .decrypt                = geode_cbc_decrypt,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
                .ivsize                 = AES_BLOCK_SIZE,
        }, {
                .base.cra_name          = "ecb(aes)",
                .base.cra_driver_name   = "ecb-aes-geode",
                .base.cra_priority      = 400,
                .base.cra_flags         = CRYPTO_ALG_KERN_DRIVER_ONLY |
                                          CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct geode_aes_tfm_ctx),
                .base.cra_alignmask     = 15,
                .base.cra_module        = THIS_MODULE,
                .init                   = geode_init_skcipher,
                .exit                   = geode_exit_skcipher,
                .setkey                 = geode_setkey_skcipher,
                .encrypt                = geode_ecb_encrypt,
                .decrypt                = geode_ecb_decrypt,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
        },
};

static void geode_aes_remove(struct pci_dev *dev)
{
        crypto_unregister_alg(&geode_alg);
        crypto_unregister_skciphers(geode_skcipher_algs,
                                    ARRAY_SIZE(geode_skcipher_algs));

        pci_iounmap(dev, _iobase);
        _iobase = NULL;

        pci_release_regions(dev);
        pci_disable_device(dev);
}


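/*
 * Probe: enable the PCI device, map BAR 0 for the AES register block,
 * clear any stale interrupt state, then register the cipher and
 * skcipher algorithms.  The error path (and geode_aes_remove()) unwinds
 * in the reverse order.
 */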
static int geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
        int ret;

        ret = pci_enable_device(dev);
        if (ret)
                return ret;

        ret = pci_request_regions(dev, "geode-aes");
        if (ret)
                goto eenable;

        _iobase = pci_iomap(dev, 0, 0);

        if (_iobase == NULL) {
                ret = -ENOMEM;
                goto erequest;
        }

        /* Clear any pending activity */
        iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

        ret = crypto_register_alg(&geode_alg);
        if (ret)
                goto eiomap;

        ret = crypto_register_skciphers(geode_skcipher_algs,
                                        ARRAY_SIZE(geode_skcipher_algs));
        if (ret)
                goto ealg;

        dev_notice(&dev->dev, "GEODE AES engine enabled.\n");
        return 0;

 ealg:
        crypto_unregister_alg(&geode_alg);

 eiomap:
        pci_iounmap(dev, _iobase);

 erequest:
        pci_release_regions(dev);

 eenable:
        pci_disable_device(dev);

        dev_err(&dev->dev, "GEODE AES initialization failed.\n");
        return ret;
}

static struct pci_device_id geode_aes_tbl[] = {
        { PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_LX_AES), },
        { 0, }
};

MODULE_DEVICE_TABLE(pci, geode_aes_tbl);

static struct pci_driver geode_aes_driver = {
        .name = "Geode LX AES",
        .id_table = geode_aes_tbl,
        .probe = geode_aes_probe,
        .remove = geode_aes_remove,
};

module_pci_driver(geode_aes_driver);

MODULE_AUTHOR("Advanced Micro Devices, Inc.");
MODULE_DESCRIPTION("Geode LX Hardware AES driver");
MODULE_LICENSE("GPL");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);