linux/drivers/crypto/sa2ul.c
// SPDX-License-Identifier: GPL-2.0
/*
 * K3 SA2UL crypto accelerator driver
 *
 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
 *
 * Authors:     Keerthy
 *              Vitaly Andrianov
 *              Tero Kristo
 */
#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dmapool.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>

#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>

#include "sa2ul.h"

/* Byte offset for key in encryption security context */
#define SC_ENC_KEY_OFFSET (1 + 27 + 4)
/* Byte offset for Aux-1 in encryption security context */
#define SC_ENC_AUX1_OFFSET (1 + 27 + 4 + 32)

#define SA_CMDL_UPD_ENC         0x0001
#define SA_CMDL_UPD_AUTH        0x0002
#define SA_CMDL_UPD_ENC_IV      0x0004
#define SA_CMDL_UPD_AUTH_IV     0x0008
#define SA_CMDL_UPD_AUX_KEY     0x0010

#define SA_AUTH_SUBKEY_LEN      16
#define SA_CMDL_PAYLOAD_LENGTH_MASK     0xFFFF
#define SA_CMDL_SOP_BYPASS_LEN_MASK     0xFF000000

#define MODE_CONTROL_BYTES      27
#define SA_HASH_PROCESSING      0
#define SA_CRYPTO_PROCESSING    0
#define SA_UPLOAD_HASH_TO_TLR   BIT(6)

#define SA_SW0_FLAGS_MASK       0xF0000
#define SA_SW0_CMDL_INFO_MASK   0x1F00000
#define SA_SW0_CMDL_PRESENT     BIT(4)
#define SA_SW0_ENG_ID_MASK      0x3E000000
#define SA_SW0_DEST_INFO_PRESENT        BIT(30)
#define SA_SW2_EGRESS_LENGTH            0xFF000000
#define SA_BASIC_HASH           0x10

#define SHA256_DIGEST_WORDS    8
/* Make 32-bit word from 4 bytes */
#define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
                                   ((b2) << 8) | (b3))
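
/*
 * Worked example, derived directly from the macro above:
 * SA_MK_U32(0x01, 0x02, 0x03, 0x04) evaluates to 0x01020304, i.e. b0
 * ends up as the most significant byte of the 32-bit word.
 */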

/* size of SCCTL structure in bytes */
#define SA_SCCTL_SZ 16

/* Max Authentication tag size */
#define SA_MAX_AUTH_TAG_SZ 64

enum sa_algo_id {
        SA_ALG_CBC_AES = 0,
        SA_ALG_EBC_AES,
        SA_ALG_CBC_DES3,
        SA_ALG_ECB_DES3,
        SA_ALG_SHA1,
        SA_ALG_SHA256,
        SA_ALG_SHA512,
        SA_ALG_AUTHENC_SHA1_AES,
        SA_ALG_AUTHENC_SHA256_AES,
};

struct sa_match_data {
        u8 priv;
        u8 priv_id;
        u32 supported_algos;
        bool skip_engine_control;
};

static struct device *sa_k3_dev;

/**
 * struct sa_cmdl_cfg - Command label configuration descriptor
 * @aalg: authentication algorithm ID
 * @enc_eng_id: Encryption Engine ID supported by the SA hardware
 * @auth_eng_id: Authentication Engine ID
 * @iv_size: Initialization Vector size
 * @akey: Authentication key
 * @akey_len: Authentication key length
 * @enc: True, if this is an encode request
 */
struct sa_cmdl_cfg {
        int aalg;
        u8 enc_eng_id;
        u8 auth_eng_id;
        u8 iv_size;
        const u8 *akey;
        u16 akey_len;
        bool enc;
};

/**
 * struct algo_data - Crypto algorithm specific data
 * @enc_eng: Encryption engine info structure
 * @auth_eng: Authentication engine info structure
 * @auth_ctrl: Authentication control word
 * @hash_size: Size of digest
 * @iv_idx: iv index in psdata
 * @iv_out_size: iv out size
 * @ealg_id: Encryption Algorithm ID
 * @aalg_id: Authentication algorithm ID
 * @mci_enc: Mode Control Instruction for Encryption algorithm
 * @mci_dec: Mode Control Instruction for Decryption
 * @inv_key: Whether the encryption algorithm demands key inversion
 * @ctx: Pointer to the algorithm context
 * @keyed_mac: Whether the authentication algorithm has key
 * @prep_iopad: Function pointer to generate intermediate ipad/opad
 */
struct algo_data {
        struct sa_eng_info enc_eng;
        struct sa_eng_info auth_eng;
        u8 auth_ctrl;
        u8 hash_size;
        u8 iv_idx;
        u8 iv_out_size;
        u8 ealg_id;
        u8 aalg_id;
        u8 *mci_enc;
        u8 *mci_dec;
        bool inv_key;
        struct sa_tfm_ctx *ctx;
        bool keyed_mac;
        void (*prep_iopad)(struct algo_data *algo, const u8 *key,
                           u16 key_sz, __be32 *ipad, __be32 *opad);
};

/**
 * struct sa_alg_tmpl: A generic template encompassing crypto/aead algorithms
 * @type: Type of the crypto algorithm.
 * @alg: Union of crypto algorithm definitions.
 * @registered: Flag indicating if the crypto algorithm is already registered
 */
struct sa_alg_tmpl {
        u32 type;               /* CRYPTO_ALG_TYPE from <linux/crypto.h> */
        union {
                struct skcipher_alg skcipher;
                struct ahash_alg ahash;
                struct aead_alg aead;
        } alg;
        bool registered;
};

/**
 * struct sa_mapped_sg: scatterlist information for tx and rx
 * @mapped: Set to true if the @sgt is mapped
 * @dir: mapping direction used for @sgt
 * @split_sg: Set if the sg is split and needs to be freed up
 * @static_sg: Static scatterlist entry for overriding data
 * @sgt: scatterlist table for DMA API use
 */
struct sa_mapped_sg {
        bool mapped;
        enum dma_data_direction dir;
        struct scatterlist static_sg;
        struct scatterlist *split_sg;
        struct sg_table sgt;
};

/**
 * struct sa_rx_data: RX Packet miscellaneous data place holder
 * @req: crypto request data pointer
 * @ddev: pointer to the DMA device
 * @tx_in: dma_async_tx_descriptor pointer for rx channel
 * @mapped_sg: Information on tx (0) and rx (1) scatterlist DMA mapping
 * @enc: Flag indicating either encryption or decryption
 * @enc_iv_size: Initialisation vector size
 * @iv_idx: Initialisation vector index
 */
struct sa_rx_data {
        void *req;
        struct device *ddev;
        struct dma_async_tx_descriptor *tx_in;
        struct sa_mapped_sg mapped_sg[2];
        u8 enc;
        u8 enc_iv_size;
        u8 iv_idx;
};

/**
 * struct sa_req: SA request definition
 * @dev: device for the request
 * @size: total data to be transmitted via DMA
 * @enc_offset: offset of cipher data
 * @enc_size: data to be passed to cipher engine
 * @enc_iv: cipher IV
 * @auth_offset: offset of the authentication data
 * @auth_size: size of the authentication data
 * @auth_iv: authentication IV
 * @type: algorithm type for the request
 * @cmdl: command label pointer
 * @base: pointer to the base request
 * @ctx: pointer to the algorithm context data
 * @enc: true if this is an encode request
 * @src: source data
 * @dst: destination data
 * @callback: DMA callback for the request
 * @mdata_size: metadata size passed to DMA
 */
struct sa_req {
        struct device *dev;
        u16 size;
        u8 enc_offset;
        u16 enc_size;
        u8 *enc_iv;
        u8 auth_offset;
        u16 auth_size;
        u8 *auth_iv;
        u32 type;
        u32 *cmdl;
        struct crypto_async_request *base;
        struct sa_tfm_ctx *ctx;
        bool enc;
        struct scatterlist *src;
        struct scatterlist *dst;
        dma_async_tx_callback callback;
        u16 mdata_size;
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption
 */
static u8 mci_cbc_enc_array[3][MODE_CONTROL_BYTES] = {
        {       0x61, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x61, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x61, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption
 */
static u8 mci_cbc_dec_array[3][MODE_CONTROL_BYTES] = {
        {       0x71, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x71, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x71, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for encryption
 */
static u8 mci_cbc_enc_no_iv_array[3][MODE_CONTROL_BYTES] = {
        {       0x21, 0x00, 0x00, 0x18, 0x88, 0x0a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x18, 0x88, 0x4a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x18, 0x88, 0x8a, 0xaa, 0x4b, 0x7e, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For CBC (Cipher Block Chaining) mode for decryption
 */
static u8 mci_cbc_dec_no_iv_array[3][MODE_CONTROL_BYTES] = {
        {       0x31, 0x00, 0x00, 0x80, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x84, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x88, 0x8a, 0xca, 0x98, 0xf4, 0x40, 0xc0,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for encryption
 */
static u8 mci_ecb_enc_array[3][MODE_CONTROL_BYTES] = {
        {       0x21, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x21, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for various Key lengths 128, 192, 256
 * For ECB (Electronic Code Book) mode for decryption
 */
static u8 mci_ecb_dec_array[3][MODE_CONTROL_BYTES] = {
        {       0x31, 0x00, 0x00, 0x80, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x84, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
        {       0x31, 0x00, 0x00, 0x88, 0x8a, 0x04, 0xb7, 0x90, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00        },
};

/*
 * Mode Control Instructions for the 3DES algorithm,
 * for CBC (Cipher Block Chaining) and ECB modes,
 * encryption and decryption respectively
 */
static u8 mci_cbc_3des_enc_array[MODE_CONTROL_BYTES] = {
        0x60, 0x00, 0x00, 0x18, 0x88, 0x52, 0xaa, 0x4b, 0x7e, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

static u8 mci_cbc_3des_dec_array[MODE_CONTROL_BYTES] = {
        0x70, 0x00, 0x00, 0x85, 0x0a, 0xca, 0x98, 0xf4, 0x40, 0xc0, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_enc_array[MODE_CONTROL_BYTES] = {
        0x20, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

static u8 mci_ecb_3des_dec_array[MODE_CONTROL_BYTES] = {
        0x30, 0x00, 0x00, 0x85, 0x0a, 0x04, 0xb7, 0x90, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x00, 0x00,
};

/*
 * Perform 16-byte (128-bit) swizzling.
 * The SA2UL expects the security context in little-endian format on a
 * 128-bit (16-byte) wide bus, hence swap the bytes of each 16-byte
 * block from higher to lower address.
 */
static void sa_swiz_128(u8 *in, u16 len)
{
        u8 data[16];
        int i, j;

        for (i = 0; i < len; i += 16) {
                memcpy(data, &in[i], 16);
                for (j = 0; j < 16; j++)
                        in[i + j] = data[15 - j];
        }
}
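
/*
 * Example of the swizzle above: the 16-byte block 00 01 02 ... 0e 0f
 * becomes 0f 0e ... 02 01 00. The operation is its own inverse, so
 * applying it twice restores the original buffer.
 */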

/* Prepare the ipad and opad from the key, per step 1 of the HMAC algorithm */
static void prepare_kipad(u8 *k_ipad, const u8 *key, u16 key_sz)
{
        int i;

        for (i = 0; i < key_sz; i++)
                k_ipad[i] = key[i] ^ 0x36;

        /* Instead of XOR with 0 */
        for (; i < SHA1_BLOCK_SIZE; i++)
                k_ipad[i] = 0x36;
}

static void prepare_kopad(u8 *k_opad, const u8 *key, u16 key_sz)
{
        int i;

        for (i = 0; i < key_sz; i++)
                k_opad[i] = key[i] ^ 0x5c;

        /* Instead of XOR with 0 */
        for (; i < SHA1_BLOCK_SIZE; i++)
                k_opad[i] = 0x5c;
}
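
/*
 * The two helpers above implement step 1 of the HMAC construction
 * (RFC 2104): the key, zero-padded to the hash block size, is XORed
 * with the repeated bytes 0x36 (ipad) and 0x5c (opad).
 * sa_prepare_iopads() below then hashes one block of each pad to
 * precompute the intermediate digests that are loaded into the
 * security context in place of the raw key.
 */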

static void sa_export_shash(void *state, struct shash_desc *hash,
                            int digest_size, __be32 *out)
{
        struct sha1_state *sha1;
        struct sha256_state *sha256;
        u32 *result;

        switch (digest_size) {
        case SHA1_DIGEST_SIZE:
                sha1 = state;
                result = sha1->state;
                break;
        case SHA256_DIGEST_SIZE:
                sha256 = state;
                result = sha256->state;
                break;
        default:
                dev_err(sa_k3_dev, "%s: bad digest_size=%d\n", __func__,
                        digest_size);
                return;
        }

        crypto_shash_export(hash, state);

        cpu_to_be32_array(out, result, digest_size / 4);
}

static void sa_prepare_iopads(struct algo_data *data, const u8 *key,
                              u16 key_sz, __be32 *ipad, __be32 *opad)
{
        SHASH_DESC_ON_STACK(shash, data->ctx->shash);
        int block_size = crypto_shash_blocksize(data->ctx->shash);
        int digest_size = crypto_shash_digestsize(data->ctx->shash);
        union {
                struct sha1_state sha1;
                struct sha256_state sha256;
                u8 k_pad[SHA1_BLOCK_SIZE];
        } sha;

        shash->tfm = data->ctx->shash;

        prepare_kipad(sha.k_pad, key, key_sz);

        crypto_shash_init(shash);
        crypto_shash_update(shash, sha.k_pad, block_size);
        sa_export_shash(&sha, shash, digest_size, ipad);

        prepare_kopad(sha.k_pad, key, key_sz);

        crypto_shash_init(shash);
        crypto_shash_update(shash, sha.k_pad, block_size);

        sa_export_shash(&sha, shash, digest_size, opad);

        memzero_explicit(&sha, sizeof(sha));
}

/* Derive the inverse key used in AES-CBC decryption operation */
static inline int sa_aes_inv_key(u8 *inv_key, const u8 *key, u16 key_sz)
{
        struct crypto_aes_ctx ctx;
        int key_pos;

        if (aes_expandkey(&ctx, key, key_sz)) {
                dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
                return -EINVAL;
        }

        /* Workaround to get the right inverse for AES_KEYSIZE_192 size keys */
        if (key_sz == AES_KEYSIZE_192) {
                ctx.key_enc[52] = ctx.key_enc[51] ^ ctx.key_enc[46];
                ctx.key_enc[53] = ctx.key_enc[52] ^ ctx.key_enc[47];
        }

        /* Based on crypto_aes_expand_key logic */
        switch (key_sz) {
        case AES_KEYSIZE_128:
        case AES_KEYSIZE_192:
                key_pos = key_sz + 24;
                break;

        case AES_KEYSIZE_256:
                key_pos = key_sz + 24 - 4;
                break;

        default:
                dev_err(sa_k3_dev, "%s: bad key len(%d)\n", __func__, key_sz);
                return -EINVAL;
        }

        memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);
        return 0;
}
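
/*
 * Note on the above: key_pos is a word index into the expanded key
 * schedule, chosen so that the copy picks up the final round keys
 * (e.g. words 40-43 of the 44-word AES-128 schedule). Presumably the
 * engine runs the equivalent inverse cipher starting from these, which
 * is why CBC decryption is handed this "inverse" key rather than the
 * user-supplied one.
 */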

/* Set Security context for the encryption engine */
static int sa_set_sc_enc(struct algo_data *ad, const u8 *key, u16 key_sz,
                         u8 enc, u8 *sc_buf)
{
        const u8 *mci = NULL;

        /* Set Encryption mode selector to crypto processing */
        sc_buf[0] = SA_CRYPTO_PROCESSING;

        if (enc)
                mci = ad->mci_enc;
        else
                mci = ad->mci_dec;
        /* Set the mode control instructions in security context */
        if (mci)
                memcpy(&sc_buf[1], mci, MODE_CONTROL_BYTES);

        /* For AES-CBC decryption get the inverse key */
        if (ad->inv_key && !enc) {
                if (sa_aes_inv_key(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz))
                        return -EINVAL;
        /* For all other cases: key is used */
        } else {
                memcpy(&sc_buf[SC_ENC_KEY_OFFSET], key, key_sz);
        }

        return 0;
}

/* Set Security context for the authentication engine */
static void sa_set_sc_auth(struct algo_data *ad, const u8 *key, u16 key_sz,
                           u8 *sc_buf)
{
        __be32 *ipad = (void *)(sc_buf + 32);
        __be32 *opad = (void *)(sc_buf + 64);

        /* Set Authentication mode selector to hash processing */
        sc_buf[0] = SA_HASH_PROCESSING;
        /* Auth SW ctrl word: bit[6]=1 (upload computed hash to TLR section) */
        sc_buf[1] = SA_UPLOAD_HASH_TO_TLR;
        sc_buf[1] |= ad->auth_ctrl;

        /* Copy the keys or ipad/opad */
        if (ad->keyed_mac) {
                ad->prep_iopad(ad, key, key_sz, ipad, opad);
        } else {
                /* basic hash */
                sc_buf[1] |= SA_BASIC_HASH;
        }
}

static inline void sa_copy_iv(__be32 *out, const u8 *iv, bool size16)
{
        int j;

        for (j = 0; j < ((size16) ? 4 : 2); j++) {
                *out = cpu_to_be32(*((u32 *)iv));
                iv += 4;
                out++;
        }
}
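
/*
 * sa_copy_iv() above copies an 8-byte (size16 == false) or 16-byte
 * (size16 == true) IV into the command label, converting each 32-bit
 * word to big endian on the way.
 */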

/* Format general command label */
static int sa_format_cmdl_gen(struct sa_cmdl_cfg *cfg, u8 *cmdl,
                              struct sa_cmdl_upd_info *upd_info)
{
        u8 enc_offset = 0, auth_offset = 0, total = 0;
        u8 enc_next_eng = SA_ENG_ID_OUTPORT2;
        u8 auth_next_eng = SA_ENG_ID_OUTPORT2;
        u32 *word_ptr = (u32 *)cmdl;
        int i;

        /* Clear the command label */
        memzero_explicit(cmdl, (SA_MAX_CMDL_WORDS * sizeof(u32)));

        /* Initialize the command update structure */
        memzero_explicit(upd_info, sizeof(*upd_info));

        if (cfg->enc_eng_id && cfg->auth_eng_id) {
                if (cfg->enc) {
                        auth_offset = SA_CMDL_HEADER_SIZE_BYTES;
                        enc_next_eng = cfg->auth_eng_id;

                        if (cfg->iv_size)
                                auth_offset += cfg->iv_size;
                } else {
                        enc_offset = SA_CMDL_HEADER_SIZE_BYTES;
                        auth_next_eng = cfg->enc_eng_id;
                }
        }

        if (cfg->enc_eng_id) {
                upd_info->flags |= SA_CMDL_UPD_ENC;
                upd_info->enc_size.index = enc_offset >> 2;
                upd_info->enc_offset.index = upd_info->enc_size.index + 1;
                /* Encryption command label */
                cmdl[enc_offset + SA_CMDL_OFFSET_NESC] = enc_next_eng;

                /* Encryption modes requiring IV */
                if (cfg->iv_size) {
                        upd_info->flags |= SA_CMDL_UPD_ENC_IV;
                        upd_info->enc_iv.index =
                                (enc_offset + SA_CMDL_HEADER_SIZE_BYTES) >> 2;
                        upd_info->enc_iv.size = cfg->iv_size;

                        cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
                                SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;

                        cmdl[enc_offset + SA_CMDL_OFFSET_OPTION_CTRL1] =
                                (SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
                        total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
                } else {
                        cmdl[enc_offset + SA_CMDL_OFFSET_LABEL_LEN] =
                                                SA_CMDL_HEADER_SIZE_BYTES;
                        total += SA_CMDL_HEADER_SIZE_BYTES;
                }
        }

        if (cfg->auth_eng_id) {
                upd_info->flags |= SA_CMDL_UPD_AUTH;
                upd_info->auth_size.index = auth_offset >> 2;
                upd_info->auth_offset.index = upd_info->auth_size.index + 1;
                cmdl[auth_offset + SA_CMDL_OFFSET_NESC] = auth_next_eng;
                cmdl[auth_offset + SA_CMDL_OFFSET_LABEL_LEN] =
                        SA_CMDL_HEADER_SIZE_BYTES;
                total += SA_CMDL_HEADER_SIZE_BYTES;
        }

        total = roundup(total, 8);

        for (i = 0; i < total / 4; i++)
                word_ptr[i] = swab32(word_ptr[i]);

        return total;
}

/* Update Command label */
static inline void sa_update_cmdl(struct sa_req *req, u32 *cmdl,
                                  struct sa_cmdl_upd_info *upd_info)
{
        int i = 0, j;

        if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
                cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
                cmdl[upd_info->enc_size.index] |= req->enc_size;
                cmdl[upd_info->enc_offset.index] &=
                                                ~SA_CMDL_SOP_BYPASS_LEN_MASK;
                cmdl[upd_info->enc_offset.index] |=
                        ((u32)req->enc_offset <<
                         __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));

                if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
                        __be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
                        u32 *enc_iv = (u32 *)req->enc_iv;

                        for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
                                data[j] = cpu_to_be32(*enc_iv);
                                enc_iv++;
                        }
                }
        }

        if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
                cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
                cmdl[upd_info->auth_size.index] |= req->auth_size;
                cmdl[upd_info->auth_offset.index] &=
                        ~SA_CMDL_SOP_BYPASS_LEN_MASK;
                cmdl[upd_info->auth_offset.index] |=
                        ((u32)req->auth_offset <<
                         __ffs(SA_CMDL_SOP_BYPASS_LEN_MASK));
                if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
                        sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
                                   req->auth_iv,
                                   (upd_info->auth_iv.size > 8));
                }
                if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
                        int offset = (req->auth_size & 0xF) ? 4 : 0;

                        memcpy(&cmdl[upd_info->aux_key_info.index],
                               &upd_info->aux_key[offset], 16);
                }
        }
}

/* Format SWINFO words to be sent to SA */
static
void sa_set_swinfo(u8 eng_id, u16 sc_id, dma_addr_t sc_phys,
                   u8 cmdl_present, u8 cmdl_offset, u8 flags,
                   u8 hash_size, u32 *swinfo)
{
        swinfo[0] = sc_id;
        swinfo[0] |= (flags << __ffs(SA_SW0_FLAGS_MASK));
        if (likely(cmdl_present))
                swinfo[0] |= ((cmdl_offset | SA_SW0_CMDL_PRESENT) <<
                                                __ffs(SA_SW0_CMDL_INFO_MASK));
        swinfo[0] |= (eng_id << __ffs(SA_SW0_ENG_ID_MASK));

        swinfo[0] |= SA_SW0_DEST_INFO_PRESENT;
        swinfo[1] = (u32)(sc_phys & 0xFFFFFFFFULL);
        swinfo[2] = (u32)((sc_phys & 0xFFFFFFFF00000000ULL) >> 32);
        swinfo[2] |= (hash_size << __ffs(SA_SW2_EGRESS_LENGTH));
}
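
/*
 * Resulting SWINFO layout, as implied by the SA_SW* masks defined at
 * the top of this file:
 *
 *   SW0: bits 0-15  security context ID
 *        bits 16-19 flags (e.g. SA_SW_INFO_FLAG_EVICT)
 *        bits 20-24 command label info (offset | SA_SW0_CMDL_PRESENT)
 *        bits 25-29 first engine ID
 *        bit  30    destination info present
 *   SW1: lower 32 bits of the security context physical address
 *   SW2: upper 32 bits of the address, egress length in bits 24-31
 */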

/* Dump the security context */
static void sa_dump_sc(u8 *buf, dma_addr_t dma_addr)
{
#ifdef DEBUG
        dev_info(sa_k3_dev, "Security context dump:: 0x%pad\n", &dma_addr);
        print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
                       16, 1, buf, SA_CTX_MAX_SZ, false);
#endif
}

static
int sa_init_sc(struct sa_ctx_info *ctx, const struct sa_match_data *match_data,
               const u8 *enc_key, u16 enc_key_sz,
               const u8 *auth_key, u16 auth_key_sz,
               struct algo_data *ad, u8 enc, u32 *swinfo)
{
        int enc_sc_offset = 0;
        int auth_sc_offset = 0;
        u8 *sc_buf = ctx->sc;
        u16 sc_id = ctx->sc_id;
        u8 first_engine = 0;

        memzero_explicit(sc_buf, SA_CTX_MAX_SZ);

        if (ad->auth_eng.eng_id) {
                if (enc)
                        first_engine = ad->enc_eng.eng_id;
                else
                        first_engine = ad->auth_eng.eng_id;

                enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
                auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
                sc_buf[1] = SA_SCCTL_FE_AUTH_ENC;
                if (!ad->hash_size)
                        return -EINVAL;
                ad->hash_size = roundup(ad->hash_size, 8);

        } else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
                enc_sc_offset = SA_CTX_PHP_PE_CTX_SZ;
                first_engine = ad->enc_eng.eng_id;
                sc_buf[1] = SA_SCCTL_FE_ENC;
                ad->hash_size = ad->iv_out_size;
        }

        /* SCCTL Owner info: 0=host, 1=CP_ACE */
        sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0;
        memcpy(&sc_buf[2], &sc_id, 2);
        sc_buf[4] = 0x0;
        sc_buf[5] = match_data->priv_id;
        sc_buf[6] = match_data->priv;
        sc_buf[7] = 0x0;

        /* Prepare context for encryption engine */
        if (ad->enc_eng.sc_size) {
                if (sa_set_sc_enc(ad, enc_key, enc_key_sz, enc,
                                  &sc_buf[enc_sc_offset]))
                        return -EINVAL;
        }

        /* Prepare context for authentication engine */
        if (ad->auth_eng.sc_size)
                sa_set_sc_auth(ad, auth_key, auth_key_sz,
                               &sc_buf[auth_sc_offset]);

        /* Set the ownership of context to CP_ACE */
        sc_buf[SA_CTX_SCCTL_OWNER_OFFSET] = 0x80;

        /* swizzle the security context */
        sa_swiz_128(sc_buf, SA_CTX_MAX_SZ);

        sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
                      SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);

        sa_dump_sc(sc_buf, ctx->sc_phys);

        return 0;
}

/* Free the per direction context memory */
static void sa_free_ctx_info(struct sa_ctx_info *ctx,
                             struct sa_crypto_data *data)
{
        unsigned long bn;

        bn = ctx->sc_id - data->sc_id_start;
        spin_lock(&data->scid_lock);
        __clear_bit(bn, data->ctx_bm);
        data->sc_id--;
        spin_unlock(&data->scid_lock);

        if (ctx->sc) {
                dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
                ctx->sc = NULL;
        }
}

static int sa_init_ctx_info(struct sa_ctx_info *ctx,
                            struct sa_crypto_data *data)
{
        unsigned long bn;
        int err;

        spin_lock(&data->scid_lock);
        bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
        __set_bit(bn, data->ctx_bm);
        data->sc_id++;
        spin_unlock(&data->scid_lock);

        ctx->sc_id = (u16)(data->sc_id_start + bn);

        ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
        if (!ctx->sc) {
                dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
                err = -ENOMEM;
                goto scid_rollback;
        }

        return 0;

scid_rollback:
        spin_lock(&data->scid_lock);
        __clear_bit(bn, data->ctx_bm);
        data->sc_id--;
        spin_unlock(&data->scid_lock);

        return err;
}

static void sa_cipher_cra_exit(struct crypto_skcipher *tfm)
{
        struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);

        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
                ctx->dec.sc_id, &ctx->dec.sc_phys);

        sa_free_ctx_info(&ctx->enc, data);
        sa_free_ctx_info(&ctx->dec, data);

        crypto_free_skcipher(ctx->fallback.skcipher);
}

static int sa_cipher_cra_init(struct crypto_skcipher *tfm)
{
        struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
        const char *name = crypto_tfm_alg_name(&tfm->base);
        struct crypto_skcipher *child;
        int ret;

        memzero_explicit(ctx, sizeof(*ctx));
        ctx->dev_data = data;

        ret = sa_init_ctx_info(&ctx->enc, data);
        if (ret)
                return ret;
        ret = sa_init_ctx_info(&ctx->dec, data);
        if (ret) {
                sa_free_ctx_info(&ctx->enc, data);
                return ret;
        }

        child = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);

        if (IS_ERR(child)) {
                dev_err(sa_k3_dev, "Error allocating fallback algo %s\n", name);
                return PTR_ERR(child);
        }

        ctx->fallback.skcipher = child;
        crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
                                         sizeof(struct skcipher_request));

        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
                ctx->dec.sc_id, &ctx->dec.sc_phys);
        return 0;
}

static int sa_cipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
                            unsigned int keylen, struct algo_data *ad)
{
        struct sa_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_skcipher *child = ctx->fallback.skcipher;
        int cmdl_len;
        struct sa_cmdl_cfg cfg;
        int ret;

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        ad->enc_eng.eng_id = SA_ENG_ID_EM1;
        ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;

        memzero_explicit(&cfg, sizeof(cfg));
        cfg.enc_eng_id = ad->enc_eng.eng_id;
        cfg.iv_size = crypto_skcipher_ivsize(tfm);

        crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(child, tfm->base.crt_flags &
                                         CRYPTO_TFM_REQ_MASK);
        ret = crypto_skcipher_setkey(child, key, keylen);
        if (ret)
                return ret;

        /* Setup Encryption Security Context & Command label template */
        if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, key, keylen, NULL, 0,
                       ad, 1, &ctx->enc.epib[1]))
                goto badkey;

        cmdl_len = sa_format_cmdl_gen(&cfg,
                                      (u8 *)ctx->enc.cmdl,
                                      &ctx->enc.cmdl_upd_info);
        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
                goto badkey;

        ctx->enc.cmdl_size = cmdl_len;

        /* Setup Decryption Security Context & Command label template */
        if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, key, keylen, NULL, 0,
                       ad, 0, &ctx->dec.epib[1]))
                goto badkey;

        cfg.enc_eng_id = ad->enc_eng.eng_id;
        cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
                                      &ctx->dec.cmdl_upd_info);

        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
                goto badkey;

        ctx->dec.cmdl_size = cmdl_len;
        ctx->iv_idx = ad->iv_idx;

        return 0;

badkey:
        dev_err(sa_k3_dev, "%s: badkey\n", __func__);
        return -EINVAL;
}

static int sa_aes_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct algo_data ad = { 0 };
        /* Convert the key size (16/24/32) to the key size index (0/1/2) */
        int key_idx = (keylen >> 3) - 2;

        if (key_idx >= 3)
                return -EINVAL;

        ad.mci_enc = mci_cbc_enc_array[key_idx];
        ad.mci_dec = mci_cbc_dec_array[key_idx];
        ad.inv_key = true;
        ad.ealg_id = SA_EALG_ID_AES_CBC;
        ad.iv_idx = 4;
        ad.iv_out_size = 16;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_aes_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct algo_data ad = { 0 };
        /* Convert the key size (16/24/32) to the key size index (0/1/2) */
        int key_idx = (keylen >> 3) - 2;

        if (key_idx >= 3)
                return -EINVAL;

        ad.mci_enc = mci_ecb_enc_array[key_idx];
        ad.mci_dec = mci_ecb_dec_array[key_idx];
        ad.inv_key = true;
        ad.ealg_id = SA_EALG_ID_AES_ECB;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_3des_cbc_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct algo_data ad = { 0 };

        ad.mci_enc = mci_cbc_3des_enc_array;
        ad.mci_dec = mci_cbc_3des_dec_array;
        ad.ealg_id = SA_EALG_ID_3DES_CBC;
        ad.iv_idx = 6;
        ad.iv_out_size = 8;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static int sa_3des_ecb_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct algo_data ad = { 0 };

        ad.mci_enc = mci_ecb_3des_enc_array;
        ad.mci_dec = mci_ecb_3des_dec_array;

        return sa_cipher_setkey(tfm, key, keylen, &ad);
}

static void sa_sync_from_device(struct sa_rx_data *rxd)
{
        struct sg_table *sgt;

        if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
                sgt = &rxd->mapped_sg[0].sgt;
        else
                sgt = &rxd->mapped_sg[1].sgt;

        dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);
}
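
/*
 * For an in-place request (src == dst) only mapped_sg[0] is used and
 * is mapped DMA_BIDIRECTIONAL, so that entry is synced above;
 * otherwise the destination lives in mapped_sg[1]. See the mapping
 * logic in sa_run() below.
 */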

static void sa_free_sa_rx_data(struct sa_rx_data *rxd)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
                struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];

                if (mapped_sg->mapped) {
                        dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
                                          mapped_sg->dir, 0);
                        kfree(mapped_sg->split_sg);
                }
        }

        kfree(rxd);
}

static void sa_aes_dma_in_callback(void *data)
{
        struct sa_rx_data *rxd = (struct sa_rx_data *)data;
        struct skcipher_request *req;
        u32 *result;
        __be32 *mdptr;
        size_t ml, pl;
        int i;

        sa_sync_from_device(rxd);
        req = container_of(rxd->req, struct skcipher_request, base);

        if (req->iv) {
                mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in,
                                                                  &pl, &ml);
                result = (u32 *)req->iv;

                for (i = 0; i < (rxd->enc_iv_size / 4); i++)
                        result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
        }

        sa_free_sa_rx_data(rxd);

        skcipher_request_complete(req, 0);
}

static void
sa_prepare_tx_desc(u32 *mdptr, u32 pslen, u32 *psdata, u32 epiblen, u32 *epib)
{
        u32 *out, *in;
        int i;

        for (out = mdptr, in = epib, i = 0; i < epiblen / sizeof(u32); i++)
                *out++ = *in++;

        mdptr[4] = (0xFFFF << 16);
        for (out = &mdptr[5], in = psdata, i = 0;
             i < pslen / sizeof(u32); i++)
                *out++ = *in++;
}
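
/*
 * Metadata layout produced above, in 32-bit words: the EPIB (SWINFO)
 * words come first, word 4 carries an 0xFFFF marker in its upper half,
 * and words 5 onwards hold the PS data, i.e. the command label plus
 * the request type word appended by sa_run().
 */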

static int sa_run(struct sa_req *req)
{
        struct sa_rx_data *rxd;
        gfp_t gfp_flags;
        u32 cmdl[SA_MAX_CMDL_WORDS];
        struct sa_crypto_data *pdata = dev_get_drvdata(sa_k3_dev);
        struct device *ddev;
        struct dma_chan *dma_rx;
        int sg_nents, src_nents, dst_nents;
        struct scatterlist *src, *dst;
        size_t pl, ml, split_size;
        struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
        int ret;
        struct dma_async_tx_descriptor *tx_out;
        u32 *mdptr;
        bool diff_dst;
        enum dma_data_direction dir_src;
        struct sa_mapped_sg *mapped_sg;

        gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
                GFP_KERNEL : GFP_ATOMIC;

        rxd = kzalloc(sizeof(*rxd), gfp_flags);
        if (!rxd)
                return -ENOMEM;

        if (req->src != req->dst) {
                diff_dst = true;
                dir_src = DMA_TO_DEVICE;
        } else {
                diff_dst = false;
                dir_src = DMA_BIDIRECTIONAL;
        }

        /*
         * SA2UL has an interesting feature where the receive DMA channel
         * is selected based on the data passed to the engine. Within the
         * transition range, there is also a space where it is impossible
         * to determine where the data will end up, and this should be
         * avoided. This will be handled by the SW fallback mechanism by
         * the individual algorithm implementations.
         */
        if (req->size >= 256)
                dma_rx = pdata->dma_rx2;
        else
                dma_rx = pdata->dma_rx1;

        ddev = dmaengine_get_dma_device(pdata->dma_tx);
        rxd->ddev = ddev;

        memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);

        sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);

        if (req->type != CRYPTO_ALG_TYPE_AHASH) {
                if (req->enc)
                        req->type |=
                                (SA_REQ_SUBTYPE_ENC << SA_REQ_SUBTYPE_SHIFT);
                else
                        req->type |=
                                (SA_REQ_SUBTYPE_DEC << SA_REQ_SUBTYPE_SHIFT);
        }

        cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;

        /*
         * Map the packets, first we check if the data fits into a single
         * sg entry and use that if possible. If it does not fit, we check
         * if we need to do sg_split to align the scatterlist data on the
         * actual data size being processed by the crypto engine.
         */
        src = req->src;
        sg_nents = sg_nents_for_len(src, req->size);

        split_size = req->size;

        mapped_sg = &rxd->mapped_sg[0];
        if (sg_nents == 1 && split_size <= req->src->length) {
                src = &mapped_sg->static_sg;
                src_nents = 1;
                sg_init_table(src, 1);
                sg_set_page(src, sg_page(req->src), split_size,
                            req->src->offset);

                mapped_sg->sgt.sgl = src;
                mapped_sg->sgt.orig_nents = src_nents;
                ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
                if (ret) {
                        kfree(rxd);
                        return ret;
                }

                mapped_sg->dir = dir_src;
                mapped_sg->mapped = true;
        } else {
                mapped_sg->sgt.sgl = req->src;
                mapped_sg->sgt.orig_nents = sg_nents;
                ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
                if (ret) {
                        kfree(rxd);
                        return ret;
                }

                mapped_sg->dir = dir_src;
                mapped_sg->mapped = true;

                ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
                               &split_size, &src, &src_nents, gfp_flags);
                if (ret) {
                        src_nents = mapped_sg->sgt.nents;
                        src = mapped_sg->sgt.sgl;
                } else {
                        mapped_sg->split_sg = src;
                }
        }

        dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);

        if (!diff_dst) {
                dst_nents = src_nents;
                dst = src;
        } else {
                dst_nents = sg_nents_for_len(req->dst, req->size);
                mapped_sg = &rxd->mapped_sg[1];

                if (dst_nents == 1 && split_size <= req->dst->length) {
                        dst = &mapped_sg->static_sg;
                        dst_nents = 1;
                        sg_init_table(dst, 1);
                        sg_set_page(dst, sg_page(req->dst), split_size,
                                    req->dst->offset);

                        mapped_sg->sgt.sgl = dst;
                        mapped_sg->sgt.orig_nents = dst_nents;
                        ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
                                              DMA_FROM_DEVICE, 0);
                        if (ret)
                                goto err_cleanup;

                        mapped_sg->dir = DMA_FROM_DEVICE;
                        mapped_sg->mapped = true;
                } else {
                        mapped_sg->sgt.sgl = req->dst;
                        mapped_sg->sgt.orig_nents = dst_nents;
                        ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
                                              DMA_FROM_DEVICE, 0);
                        if (ret)
                                goto err_cleanup;

                        mapped_sg->dir = DMA_FROM_DEVICE;
                        mapped_sg->mapped = true;

                        ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
                                       0, 1, &split_size, &dst, &dst_nents,
                                       gfp_flags);
                        if (ret) {
                                dst_nents = mapped_sg->sgt.nents;
                                dst = mapped_sg->sgt.sgl;
                        } else {
                                mapped_sg->split_sg = dst;
                        }
                }
        }

        rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
                                             DMA_DEV_TO_MEM,
                                             DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!rxd->tx_in) {
                dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
                ret = -EINVAL;
                goto err_cleanup;
        }

        rxd->req = (void *)req->base;
        rxd->enc = req->enc;
        rxd->iv_idx = req->ctx->iv_idx;
        rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
        rxd->tx_in->callback = req->callback;
        rxd->tx_in->callback_param = rxd;

        tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src,
                                         src_nents, DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);

        if (!tx_out) {
                dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
                ret = -EINVAL;
                goto err_cleanup;
        }

        /*
         * Prepare metadata for DMA engine. This essentially describes the
         * crypto algorithm to be used, data sizes, different keys etc.
         */
        mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(tx_out, &pl, &ml);

        sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
                                   sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
                           sa_ctx->epib);

        ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
        dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);

        dmaengine_submit(tx_out);
        dmaengine_submit(rxd->tx_in);

        dma_async_issue_pending(dma_rx);
        dma_async_issue_pending(pdata->dma_tx);

        return -EINPROGRESS;

err_cleanup:
        sa_free_sa_rx_data(rxd);

        return ret;
}
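
/*
 * On success sa_run() returns -EINPROGRESS: completion is signalled
 * asynchronously from the receive DMA callback passed in via sa_req
 * (sa_aes_dma_in_callback() or sa_sha_dma_in_callback()), which syncs
 * and unmaps the buffers, frees the sa_rx_data and completes the
 * crypto request.
 */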

static int sa_cipher_run(struct skcipher_request *req, u8 *iv, int enc)
{
        struct sa_tfm_ctx *ctx =
            crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        struct crypto_alg *alg = req->base.tfm->__crt_alg;
        struct sa_req sa_req = { 0 };

        if (!req->cryptlen)
                return 0;

        if (req->cryptlen % alg->cra_blocksize)
                return -EINVAL;

        /* Use SW fallback if the data size is not supported */
        if (req->cryptlen > SA_MAX_DATA_SZ ||
            (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
             req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);

                skcipher_request_set_tfm(subreq, ctx->fallback.skcipher);
                skcipher_request_set_callback(subreq, req->base.flags,
                                              req->base.complete,
                                              req->base.data);
                skcipher_request_set_crypt(subreq, req->src, req->dst,
                                           req->cryptlen, req->iv);
                if (enc)
                        return crypto_skcipher_encrypt(subreq);
                else
                        return crypto_skcipher_decrypt(subreq);
        }

        sa_req.size = req->cryptlen;
        sa_req.enc_size = req->cryptlen;
        sa_req.src = req->src;
        sa_req.dst = req->dst;
        sa_req.enc_iv = iv;
        sa_req.type = CRYPTO_ALG_TYPE_SKCIPHER;
        sa_req.enc = enc;
        sa_req.callback = sa_aes_dma_in_callback;
        sa_req.mdata_size = 44;
        sa_req.base = &req->base;
        sa_req.ctx = ctx;

        return sa_run(&sa_req);
}

static int sa_encrypt(struct skcipher_request *req)
{
        return sa_cipher_run(req, req->iv, 1);
}

static int sa_decrypt(struct skcipher_request *req)
{
        return sa_cipher_run(req, req->iv, 0);
}

static void sa_sha_dma_in_callback(void *data)
{
        struct sa_rx_data *rxd = (struct sa_rx_data *)data;
        struct ahash_request *req;
        struct crypto_ahash *tfm;
        unsigned int authsize;
        int i;
        size_t ml, pl;
        u32 *result;
        __be32 *mdptr;

        sa_sync_from_device(rxd);
        req = container_of(rxd->req, struct ahash_request, base);
        tfm = crypto_ahash_reqtfm(req);
        authsize = crypto_ahash_digestsize(tfm);

        mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
        result = (u32 *)req->result;

        for (i = 0; i < (authsize / 4); i++)
                result[i] = be32_to_cpu(mdptr[i + 4]);

        sa_free_sa_rx_data(rxd);

        ahash_request_complete(req, 0);
}

static int zero_message_process(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        int sa_digest_size = crypto_ahash_digestsize(tfm);

        switch (sa_digest_size) {
        case SHA1_DIGEST_SIZE:
                memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
                break;
        case SHA256_DIGEST_SIZE:
                memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
                break;
        case SHA512_DIGEST_SIZE:
                memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
                break;
        default:
                return -EINVAL;
        }

        return 0;
}
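
/*
 * zero_message_process() above short-circuits zero-length requests
 * with the kernel's precomputed empty-message digests, presumably
 * because a zero-byte payload cannot be pushed through the DMA path.
 */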
1400
1401static int sa_sha_run(struct ahash_request *req)
1402{
1403        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
1404        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1405        struct sa_req sa_req = { 0 };
1406        size_t auth_len;
1407
1408        auth_len = req->nbytes;
1409
1410        if (!auth_len)
1411                return zero_message_process(req);
1412
1413        if (auth_len > SA_MAX_DATA_SZ ||
1414            (auth_len >= SA_UNSAFE_DATA_SZ_MIN &&
1415             auth_len <= SA_UNSAFE_DATA_SZ_MAX)) {
1416                struct ahash_request *subreq = &rctx->fallback_req;
1417                int ret = 0;
1418
1419                ahash_request_set_tfm(subreq, ctx->fallback.ahash);
1420                subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1421
1422                crypto_ahash_init(subreq);
1423
1424                subreq->nbytes = auth_len;
1425                subreq->src = req->src;
1426                subreq->result = req->result;
1427
1428                ret |= crypto_ahash_update(subreq);
1429
1430                subreq->nbytes = 0;
1431
1432                ret |= crypto_ahash_final(subreq);
1433
1434                return ret;
1435        }
1436
1437        sa_req.size = auth_len;
1438        sa_req.auth_size = auth_len;
1439        sa_req.src = req->src;
1440        sa_req.dst = req->src;
1441        sa_req.enc = true;
1442        sa_req.type = CRYPTO_ALG_TYPE_AHASH;
1443        sa_req.callback = sa_sha_dma_in_callback;
1444        sa_req.mdata_size = 28;
1445        sa_req.ctx = ctx;
1446        sa_req.base = &req->base;
1447
1448        return sa_run(&sa_req);
1449}
1450
1451static int sa_sha_setup(struct sa_tfm_ctx *ctx, struct  algo_data *ad)
1452{
1453        int bs = crypto_shash_blocksize(ctx->shash);
1454        int cmdl_len;
1455        struct sa_cmdl_cfg cfg;
1456
1457        ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
1458        ad->auth_eng.eng_id = SA_ENG_ID_AM1;
1459        ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
1460
1461        memset(ctx->authkey, 0, bs);
1462        memset(&cfg, 0, sizeof(cfg));
1463        cfg.aalg = ad->aalg_id;
1464        cfg.enc_eng_id = ad->enc_eng.eng_id;
1465        cfg.auth_eng_id = ad->auth_eng.eng_id;
1466        cfg.iv_size = 0;
1467        cfg.akey = NULL;
1468        cfg.akey_len = 0;
1469
1470        ctx->dev_data = dev_get_drvdata(sa_k3_dev);
1471        /* Setup Encryption Security Context & Command label template */
1472        if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, NULL, 0, NULL, 0,
1473                       ad, 0, &ctx->enc.epib[1]))
1474                goto badkey;
1475
1476        cmdl_len = sa_format_cmdl_gen(&cfg,
1477                                      (u8 *)ctx->enc.cmdl,
1478                                      &ctx->enc.cmdl_upd_info);
1479        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1480                goto badkey;
1481
1482        ctx->enc.cmdl_size = cmdl_len;
1483
1484        return 0;
1485
1486badkey:
1487        dev_err(sa_k3_dev, "%s: badkey\n", __func__);
1488        return -EINVAL;
1489}
1490
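    /*
     * Common tfm init for the hash algorithms. It reserves a security
     * context and allocates two software helpers of the same algorithm,
     * a shash (used for block-size queries) and an ahash that serves as
     * the request fallback.
     */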
1491static int sa_sha_cra_init_alg(struct crypto_tfm *tfm, const char *alg_base)
1492{
1493        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1494        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1495        int ret;
1496
1497        memset(ctx, 0, sizeof(*ctx));
1498        ctx->dev_data = data;
1499        ret = sa_init_ctx_info(&ctx->enc, data);
1500        if (ret)
1501                return ret;
1502
1503        if (alg_base) {
1504                ctx->shash = crypto_alloc_shash(alg_base, 0,
1505                                                CRYPTO_ALG_NEED_FALLBACK);
1506                if (IS_ERR(ctx->shash)) {
1507                        dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n",
1508                                alg_base);
                            sa_free_ctx_info(&ctx->enc, data);
1509                        return PTR_ERR(ctx->shash);
1510                }
1511                /* for fallback */
1512                ctx->fallback.ahash =
1513                        crypto_alloc_ahash(alg_base, 0,
1514                                           CRYPTO_ALG_NEED_FALLBACK);
1515                if (IS_ERR(ctx->fallback.ahash)) {
1516                        dev_err(ctx->dev_data->dev,
1517                                "Could not load fallback driver\n");
                            crypto_free_shash(ctx->shash);
                            sa_free_ctx_info(&ctx->enc, data);
1518                        return PTR_ERR(ctx->fallback.ahash);
1519                }
1520        }
1521
1522        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1523                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1524                ctx->dec.sc_id, &ctx->dec.sc_phys);
1525
1526        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
1527                                 sizeof(struct sa_sha_req_ctx) +
1528                                 crypto_ahash_reqsize(ctx->fallback.ahash));
1529
1530        return 0;
1531}
1532
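    /*
     * Only the one-shot digest() path below is offloaded. The driver has
     * no way to checkpoint a partial hash in the engine, so init, update,
     * final, finup, export and import all drive the software fallback
     * ahash instead.
     */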
1533static int sa_sha_digest(struct ahash_request *req)
1534{
1535        return sa_sha_run(req);
1536}
1537
1538static int sa_sha_init(struct ahash_request *req)
1539{
1540        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1541        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1542        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1543
1544        dev_dbg(sa_k3_dev, "init: digest size: %u, rctx=%p\n",
1545                crypto_ahash_digestsize(tfm), rctx);
1546
1547        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1548        rctx->fallback_req.base.flags =
1549                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1550
1551        return crypto_ahash_init(&rctx->fallback_req);
1552}
1553
1554static int sa_sha_update(struct ahash_request *req)
1555{
1556        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1557        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1558        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1559
1560        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1561        rctx->fallback_req.base.flags =
1562                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1563        rctx->fallback_req.nbytes = req->nbytes;
1564        rctx->fallback_req.src = req->src;
1565
1566        return crypto_ahash_update(&rctx->fallback_req);
1567}
1568
1569static int sa_sha_final(struct ahash_request *req)
1570{
1571        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1572        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1573        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1574
1575        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1576        rctx->fallback_req.base.flags =
1577                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1578        rctx->fallback_req.result = req->result;
1579
1580        return crypto_ahash_final(&rctx->fallback_req);
1581}
1582
1583static int sa_sha_finup(struct ahash_request *req)
1584{
1585        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1586        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1587        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1588
1589        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1590        rctx->fallback_req.base.flags =
1591                req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1592
1593        rctx->fallback_req.nbytes = req->nbytes;
1594        rctx->fallback_req.src = req->src;
1595        rctx->fallback_req.result = req->result;
1596
1597        return crypto_ahash_finup(&rctx->fallback_req);
1598}
1599
1600static int sa_sha_import(struct ahash_request *req, const void *in)
1601{
1602        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1603        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1604        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1605
1606        ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
1607        rctx->fallback_req.base.flags = req->base.flags &
1608                CRYPTO_TFM_REQ_MAY_SLEEP;
1609
1610        return crypto_ahash_import(&rctx->fallback_req, in);
1611}
1612
1613static int sa_sha_export(struct ahash_request *req, void *out)
1614{
1615        struct sa_sha_req_ctx *rctx = ahash_request_ctx(req);
1616        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1617        struct sa_tfm_ctx *ctx = crypto_ahash_ctx(tfm);
1618        struct ahash_request *subreq = &rctx->fallback_req;
1619
1620        ahash_request_set_tfm(subreq, ctx->fallback.ahash);
1621        subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
1622
1623        return crypto_ahash_export(subreq, out);
1624}
1625
1626static int sa_sha1_cra_init(struct crypto_tfm *tfm)
1627{
1628        struct algo_data ad = { 0 };
1629        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
            int ret;
1630
1631        ret = sa_sha_cra_init_alg(tfm, "sha1");
            if (ret)
                    return ret;
1632
1633        ad.aalg_id = SA_AALG_ID_SHA1;
1634        ad.hash_size = SHA1_DIGEST_SIZE;
1635        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
1636
1637        return sa_sha_setup(ctx, &ad);
1640}
1641
1642static int sa_sha256_cra_init(struct crypto_tfm *tfm)
1643{
1644        struct algo_data ad = { 0 };
1645        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
            int ret;
1646
1647        ret = sa_sha_cra_init_alg(tfm, "sha256");
            if (ret)
                    return ret;
1648
1649        ad.aalg_id = SA_AALG_ID_SHA2_256;
1650        ad.hash_size = SHA256_DIGEST_SIZE;
1651        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
1652
1653        return sa_sha_setup(ctx, &ad);
1656}
1657
1658static int sa_sha512_cra_init(struct crypto_tfm *tfm)
1659{
1660        struct algo_data ad = { 0 };
1661        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
            int ret;
1662
1663        ret = sa_sha_cra_init_alg(tfm, "sha512");
            if (ret)
                    return ret;
1664
1665        ad.aalg_id = SA_AALG_ID_SHA2_512;
1666        ad.hash_size = SHA512_DIGEST_SIZE;
1667        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA512;
1668
1669        return sa_sha_setup(ctx, &ad);
1672}
1673
1674static void sa_sha_cra_exit(struct crypto_tfm *tfm)
1675{
1676        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
1677        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1678
1679        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1680                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1681                ctx->dec.sc_id, &ctx->dec.sc_phys);
1682
1683        if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AHASH)
1684                sa_free_ctx_info(&ctx->enc, data);
1685
1686        crypto_free_shash(ctx->shash);
1687        crypto_free_ahash(ctx->fallback.ahash);
1688}
1689
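    /*
     * AEAD completion handler. The engine returns the computed auth tag
     * in the response metadata words (mdptr[4..]): on encryption the tag
     * is copied out to the destination after assoclen + cryptlen, on
     * decryption it is compared against the trailing tag in the source
     * and -EBADMSG is returned on a mismatch, per the crypto API
     * contract.
     */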
1690static void sa_aead_dma_in_callback(void *data)
1691{
1692        struct sa_rx_data *rxd = (struct sa_rx_data *)data;
1693        struct aead_request *req;
1694        struct crypto_aead *tfm;
1695        unsigned int start;
1696        unsigned int authsize;
1697        u8 auth_tag[SA_MAX_AUTH_TAG_SZ];
1698        size_t pl, ml;
1699        int i;
1700        int err = 0;
1701        u32 *mdptr;
1702
1703        sa_sync_from_device(rxd);
1704        req = container_of(rxd->req, struct aead_request, base);
1705        tfm = crypto_aead_reqtfm(req);
1706        start = req->assoclen + req->cryptlen;
1707        authsize = crypto_aead_authsize(tfm);
1708
1709        mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
1710        for (i = 0; i < (authsize / 4); i++)
1711                mdptr[i + 4] = swab32(mdptr[i + 4]);
1712
1713        if (rxd->enc) {
1714                scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
1715                                         1);
1716        } else {
1717                start -= authsize;
1718                scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
1719                                         0);
1720
1721                err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
1722        }
1723
1724        sa_free_sa_rx_data(rxd);
1725
1726        aead_request_complete(req, err);
1727}
1728
1729static int sa_cra_init_aead(struct crypto_aead *tfm, const char *hash,
1730                            const char *fallback)
1731{
1732        struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1733        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1734        int ret;
1735
1736        memzero_explicit(ctx, sizeof(*ctx));
1737        ctx->dev_data = data;
1738
1739        ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
1740        if (IS_ERR(ctx->shash)) {
1741                dev_err(sa_k3_dev, "base driver %s couldn't be loaded\n", hash);
1742                return PTR_ERR(ctx->shash);
1743        }
1744
1745        ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
1746                                               CRYPTO_ALG_NEED_FALLBACK);
1747
1748        if (IS_ERR(ctx->fallback.aead)) {
1749                dev_err(sa_k3_dev, "fallback driver %s couldn't be loaded\n",
1750                        fallback);
                    crypto_free_shash(ctx->shash);
1751                return PTR_ERR(ctx->fallback.aead);
1752        }
1753
1754        crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
1755                                crypto_aead_reqsize(ctx->fallback.aead));
1756
1757        ret = sa_init_ctx_info(&ctx->enc, data);
1758        if (ret) {
                    crypto_free_shash(ctx->shash);
                    crypto_free_aead(ctx->fallback.aead);
1759                return ret;
            }
1760
1761        ret = sa_init_ctx_info(&ctx->dec, data);
1762        if (ret) {
1763                sa_free_ctx_info(&ctx->enc, data);
                    crypto_free_shash(ctx->shash);
                    crypto_free_aead(ctx->fallback.aead);
1764                return ret;
1765        }
1766
1767        dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
1768                __func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
1769                ctx->dec.sc_id, &ctx->dec.sc_phys);
1770
1771        return ret;
1772}
1773
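    /*
     * The fallback names request the ARMv8 Crypto Extensions ("-ce")
     * implementations explicitly; the K3 SoCs this driver targets are
     * arm64 parts where those drivers are expected to be available.
     */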
1774static int sa_cra_init_aead_sha1(struct crypto_aead *tfm)
1775{
1776        return sa_cra_init_aead(tfm, "sha1",
1777                                "authenc(hmac(sha1-ce),cbc(aes-ce))");
1778}
1779
1780static int sa_cra_init_aead_sha256(struct crypto_aead *tfm)
1781{
1782        return sa_cra_init_aead(tfm, "sha256",
1783                                "authenc(hmac(sha256-ce),cbc(aes-ce))");
1784}
1785
1786static void sa_exit_tfm_aead(struct crypto_aead *tfm)
1787{
1788        struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1789        struct sa_crypto_data *data = dev_get_drvdata(sa_k3_dev);
1790
1791        crypto_free_shash(ctx->shash);
1792        crypto_free_aead(ctx->fallback.aead);
1793
1794        sa_free_ctx_info(&ctx->enc, data);
1795        sa_free_ctx_info(&ctx->dec, data);
1796}
1797
1798/* AEAD setkey: parse the authenc() key blob and build enc/dec security contexts */
1799static int sa_aead_setkey(struct crypto_aead *authenc,
1800                          const u8 *key, unsigned int keylen,
1801                          struct algo_data *ad)
1802{
1803        struct sa_tfm_ctx *ctx = crypto_aead_ctx(authenc);
1804        struct crypto_authenc_keys keys;
1805        int cmdl_len;
1806        struct sa_cmdl_cfg cfg;
1807        int key_idx;
1808
1809        if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1810                return -EINVAL;
1811
1812        /* Convert the key size (16/24/32) to the key size index (0/1/2) */
1813        key_idx = (keys.enckeylen >> 3) - 2;
1814        if (key_idx < 0 || key_idx >= 3)
1815                return -EINVAL;
1816
1817        ad->ctx = ctx;
1818        ad->enc_eng.eng_id = SA_ENG_ID_EM1;
1819        ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
1820        ad->auth_eng.eng_id = SA_ENG_ID_AM1;
1821        ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
1822        ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
1823        ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
1824        ad->inv_key = true;
1825        ad->keyed_mac = true;
1826        ad->ealg_id = SA_EALG_ID_AES_CBC;
1827        ad->prep_iopad = sa_prepare_iopads;
1828
1829        memset(&cfg, 0, sizeof(cfg));
1830        cfg.enc = true;
1831        cfg.aalg = ad->aalg_id;
1832        cfg.enc_eng_id = ad->enc_eng.eng_id;
1833        cfg.auth_eng_id = ad->auth_eng.eng_id;
1834        cfg.iv_size = crypto_aead_ivsize(authenc);
1835        cfg.akey = keys.authkey;
1836        cfg.akey_len = keys.authkeylen;
1837
1838        /* Setup Encryption Security Context & Command label template */
1839        if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, keys.enckey,
1840                       keys.enckeylen, keys.authkey, keys.authkeylen,
1841                       ad, 1, &ctx->enc.epib[1]))
1842                return -EINVAL;
1843
1844        cmdl_len = sa_format_cmdl_gen(&cfg,
1845                                      (u8 *)ctx->enc.cmdl,
1846                                      &ctx->enc.cmdl_upd_info);
1847        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1848                return -EINVAL;
1849
1850        ctx->enc.cmdl_size = cmdl_len;
1851
1852        /* Setup Decryption Security Context & Command label template */
1853        if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, keys.enckey,
1854                       keys.enckeylen, keys.authkey, keys.authkeylen,
1855                       ad, 0, &ctx->dec.epib[1]))
1856                return -EINVAL;
1857
1858        cfg.enc = false;
1859        cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
1860                                      &ctx->dec.cmdl_upd_info);
1861
1862        if (cmdl_len <= 0 || (cmdl_len > SA_MAX_CMDL_WORDS * sizeof(u32)))
1863                return -EINVAL;
1864
1865        ctx->dec.cmdl_size = cmdl_len;
1866
1867        crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
1868        crypto_aead_set_flags(ctx->fallback.aead,
1869                              crypto_aead_get_flags(authenc) &
1870                              CRYPTO_TFM_REQ_MASK);
1871        return crypto_aead_setkey(ctx->fallback.aead, key, keylen);
1874}
1875
1876static int sa_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
1877{
1878        struct sa_tfm_ctx *ctx = crypto_tfm_ctx(crypto_aead_tfm(tfm));
1879
1880        return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
1881}
1882
1883static int sa_aead_cbc_sha1_setkey(struct crypto_aead *authenc,
1884                                   const u8 *key, unsigned int keylen)
1885{
1886        struct algo_data ad = { 0 };
1887
1888        ad.ealg_id = SA_EALG_ID_AES_CBC;
1889        ad.aalg_id = SA_AALG_ID_HMAC_SHA1;
1890        ad.hash_size = SHA1_DIGEST_SIZE;
1891        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA1;
1892
1893        return sa_aead_setkey(authenc, key, keylen, &ad);
1894}
1895
1896static int sa_aead_cbc_sha256_setkey(struct crypto_aead *authenc,
1897                                     const u8 *key, unsigned int keylen)
1898{
1899        struct algo_data ad = { 0 };
1900
1901        ad.ealg_id = SA_EALG_ID_AES_CBC;
1902        ad.aalg_id = SA_AALG_ID_HMAC_SHA2_256;
1903        ad.hash_size = SHA256_DIGEST_SIZE;
1904        ad.auth_ctrl = SA_AUTH_SW_CTRL_SHA256;
1905
1906        return sa_aead_setkey(authenc, key, keylen, &ad);
1907}
1908
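    /*
     * Run one AEAD request through the hardware, or divert the whole
     * request to the fallback aead when the authenticated length falls
     * outside the sizes the engine handles (same SA_UNSAFE window as the
     * hash path).
     */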
1909static int sa_aead_run(struct aead_request *req, u8 *iv, int enc)
1910{
1911        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1912        struct sa_tfm_ctx *ctx = crypto_aead_ctx(tfm);
1913        struct sa_req sa_req = { 0 };
1914        size_t auth_size, enc_size;
1915
1916        enc_size = req->cryptlen;
1917        auth_size = req->assoclen + req->cryptlen;
1918
1919        if (!enc) {
1920                enc_size -= crypto_aead_authsize(tfm);
1921                auth_size -= crypto_aead_authsize(tfm);
1922        }
1923
1924        if (auth_size > SA_MAX_DATA_SZ ||
1925            (auth_size >= SA_UNSAFE_DATA_SZ_MIN &&
1926             auth_size <= SA_UNSAFE_DATA_SZ_MAX)) {
1927                struct aead_request *subreq = aead_request_ctx(req);
1928                int ret;
1929
1930                aead_request_set_tfm(subreq, ctx->fallback.aead);
1931                aead_request_set_callback(subreq, req->base.flags,
1932                                          req->base.complete, req->base.data);
1933                aead_request_set_crypt(subreq, req->src, req->dst,
1934                                       req->cryptlen, req->iv);
1935                aead_request_set_ad(subreq, req->assoclen);
1936
1937                ret = enc ? crypto_aead_encrypt(subreq) :
1938                        crypto_aead_decrypt(subreq);
1939                return ret;
1940        }
1941
1942        sa_req.enc_offset = req->assoclen;
1943        sa_req.enc_size = enc_size;
1944        sa_req.auth_size = auth_size;
1945        sa_req.size = auth_size;
1946        sa_req.enc_iv = iv;
1947        sa_req.type = CRYPTO_ALG_TYPE_AEAD;
1948        sa_req.enc = enc;
1949        sa_req.callback = sa_aead_dma_in_callback;
1950        sa_req.mdata_size = 52;
1951        sa_req.base = &req->base;
1952        sa_req.ctx = ctx;
1953        sa_req.src = req->src;
1954        sa_req.dst = req->dst;
1955
1956        return sa_run(&sa_req);
1957}
1958
1959/* AEAD algorithm encrypt interface function */
1960static int sa_aead_encrypt(struct aead_request *req)
1961{
1962        return sa_aead_run(req, req->iv, 1);
1963}
1964
1965/* AEAD algorithm decrypt interface function */
1966static int sa_aead_decrypt(struct aead_request *req)
1967{
1968        return sa_aead_run(req, req->iv, 0);
1969}
1970
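    /*
     * Algorithm templates, indexed by enum sa_algo_id so that individual
     * entries can be gated by the supported_algos bitmask in the per-SoC
     * match data.
     */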
1971static struct sa_alg_tmpl sa_algs[] = {
1972        [SA_ALG_CBC_AES] = {
1973                .type = CRYPTO_ALG_TYPE_SKCIPHER,
1974                .alg.skcipher = {
1975                        .base.cra_name          = "cbc(aes)",
1976                        .base.cra_driver_name   = "cbc-aes-sa2ul",
1977                        .base.cra_priority      = 30000,
1978                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
1979                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
1980                                                  CRYPTO_ALG_ASYNC |
1981                                                  CRYPTO_ALG_NEED_FALLBACK,
1982                        .base.cra_blocksize     = AES_BLOCK_SIZE,
1983                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
1984                        .base.cra_module        = THIS_MODULE,
1985                        .init                   = sa_cipher_cra_init,
1986                        .exit                   = sa_cipher_cra_exit,
1987                        .min_keysize            = AES_MIN_KEY_SIZE,
1988                        .max_keysize            = AES_MAX_KEY_SIZE,
1989                        .ivsize                 = AES_BLOCK_SIZE,
1990                        .setkey                 = sa_aes_cbc_setkey,
1991                        .encrypt                = sa_encrypt,
1992                        .decrypt                = sa_decrypt,
1993                }
1994        },
1995        [SA_ALG_EBC_AES] = {
1996                .type = CRYPTO_ALG_TYPE_SKCIPHER,
1997                .alg.skcipher = {
1998                        .base.cra_name          = "ecb(aes)",
1999                        .base.cra_driver_name   = "ecb-aes-sa2ul",
2000                        .base.cra_priority      = 30000,
2001                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
2002                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2003                                                  CRYPTO_ALG_ASYNC |
2004                                                  CRYPTO_ALG_NEED_FALLBACK,
2005                        .base.cra_blocksize     = AES_BLOCK_SIZE,
2006                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
2007                        .base.cra_module        = THIS_MODULE,
2008                        .init                   = sa_cipher_cra_init,
2009                        .exit                   = sa_cipher_cra_exit,
2010                        .min_keysize            = AES_MIN_KEY_SIZE,
2011                        .max_keysize            = AES_MAX_KEY_SIZE,
2012                        .setkey                 = sa_aes_ecb_setkey,
2013                        .encrypt                = sa_encrypt,
2014                        .decrypt                = sa_decrypt,
2015                }
2016        },
2017        [SA_ALG_CBC_DES3] = {
2018                .type = CRYPTO_ALG_TYPE_SKCIPHER,
2019                .alg.skcipher = {
2020                        .base.cra_name          = "cbc(des3_ede)",
2021                        .base.cra_driver_name   = "cbc-des3-sa2ul",
2022                        .base.cra_priority      = 30000,
2023                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
2024                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2025                                                  CRYPTO_ALG_ASYNC |
2026                                                  CRYPTO_ALG_NEED_FALLBACK,
2027                        .base.cra_blocksize     = DES_BLOCK_SIZE,
2028                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
2029                        .base.cra_module        = THIS_MODULE,
2030                        .init                   = sa_cipher_cra_init,
2031                        .exit                   = sa_cipher_cra_exit,
2032                        .min_keysize            = 3 * DES_KEY_SIZE,
2033                        .max_keysize            = 3 * DES_KEY_SIZE,
2034                        .ivsize                 = DES_BLOCK_SIZE,
2035                        .setkey                 = sa_3des_cbc_setkey,
2036                        .encrypt                = sa_encrypt,
2037                        .decrypt                = sa_decrypt,
2038                }
2039        },
2040        [SA_ALG_ECB_DES3] = {
2041                .type = CRYPTO_ALG_TYPE_SKCIPHER,
2042                .alg.skcipher = {
2043                        .base.cra_name          = "ecb(des3_ede)",
2044                        .base.cra_driver_name   = "ecb-des3-sa2ul",
2045                        .base.cra_priority      = 30000,
2046                        .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER |
2047                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2048                                                  CRYPTO_ALG_ASYNC |
2049                                                  CRYPTO_ALG_NEED_FALLBACK,
2050                        .base.cra_blocksize     = DES_BLOCK_SIZE,
2051                        .base.cra_ctxsize       = sizeof(struct sa_tfm_ctx),
2052                        .base.cra_module        = THIS_MODULE,
2053                        .init                   = sa_cipher_cra_init,
2054                        .exit                   = sa_cipher_cra_exit,
2055                        .min_keysize            = 3 * DES_KEY_SIZE,
2056                        .max_keysize            = 3 * DES_KEY_SIZE,
2057                        .setkey                 = sa_3des_ecb_setkey,
2058                        .encrypt                = sa_encrypt,
2059                        .decrypt                = sa_decrypt,
2060                }
2061        },
2062        [SA_ALG_SHA1] = {
2063                .type = CRYPTO_ALG_TYPE_AHASH,
2064                .alg.ahash = {
2065                        .halg.base = {
2066                                .cra_name       = "sha1",
2067                                .cra_driver_name        = "sha1-sa2ul",
2068                                .cra_priority   = 400,
2069                                .cra_flags      = CRYPTO_ALG_TYPE_AHASH |
2070                                                  CRYPTO_ALG_ASYNC |
2071                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2072                                                  CRYPTO_ALG_NEED_FALLBACK,
2073                                .cra_blocksize  = SHA1_BLOCK_SIZE,
2074                                .cra_ctxsize    = sizeof(struct sa_tfm_ctx),
2075                                .cra_module     = THIS_MODULE,
2076                                .cra_init       = sa_sha1_cra_init,
2077                                .cra_exit       = sa_sha_cra_exit,
2078                        },
2079                        .halg.digestsize        = SHA1_DIGEST_SIZE,
2080                        .halg.statesize         = sizeof(struct sa_sha_req_ctx) +
2081                                                  sizeof(struct sha1_state),
2082                        .init                   = sa_sha_init,
2083                        .update                 = sa_sha_update,
2084                        .final                  = sa_sha_final,
2085                        .finup                  = sa_sha_finup,
2086                        .digest                 = sa_sha_digest,
2087                        .export                 = sa_sha_export,
2088                        .import                 = sa_sha_import,
2089                },
2090        },
2091        [SA_ALG_SHA256] = {
2092                .type = CRYPTO_ALG_TYPE_AHASH,
2093                .alg.ahash = {
2094                        .halg.base = {
2095                                .cra_name       = "sha256",
2096                                .cra_driver_name        = "sha256-sa2ul",
2097                                .cra_priority   = 400,
2098                                .cra_flags      = CRYPTO_ALG_TYPE_AHASH |
2099                                                  CRYPTO_ALG_ASYNC |
2100                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2101                                                  CRYPTO_ALG_NEED_FALLBACK,
2102                                .cra_blocksize  = SHA256_BLOCK_SIZE,
2103                                .cra_ctxsize    = sizeof(struct sa_tfm_ctx),
2104                                .cra_module     = THIS_MODULE,
2105                                .cra_init       = sa_sha256_cra_init,
2106                                .cra_exit       = sa_sha_cra_exit,
2107                        },
2108                        .halg.digestsize        = SHA256_DIGEST_SIZE,
2109                        .halg.statesize         = sizeof(struct sa_sha_req_ctx) +
2110                                                  sizeof(struct sha256_state),
2111                        .init                   = sa_sha_init,
2112                        .update                 = sa_sha_update,
2113                        .final                  = sa_sha_final,
2114                        .finup                  = sa_sha_finup,
2115                        .digest                 = sa_sha_digest,
2116                        .export                 = sa_sha_export,
2117                        .import                 = sa_sha_import,
2118                },
2119        },
2120        [SA_ALG_SHA512] = {
2121                .type = CRYPTO_ALG_TYPE_AHASH,
2122                .alg.ahash = {
2123                        .halg.base = {
2124                                .cra_name       = "sha512",
2125                                .cra_driver_name        = "sha512-sa2ul",
2126                                .cra_priority   = 400,
2127                                .cra_flags      = CRYPTO_ALG_TYPE_AHASH |
2128                                                  CRYPTO_ALG_ASYNC |
2129                                                  CRYPTO_ALG_KERN_DRIVER_ONLY |
2130                                                  CRYPTO_ALG_NEED_FALLBACK,
2131                                .cra_blocksize  = SHA512_BLOCK_SIZE,
2132                                .cra_ctxsize    = sizeof(struct sa_tfm_ctx),
2133                                .cra_module     = THIS_MODULE,
2134                                .cra_init       = sa_sha512_cra_init,
2135                                .cra_exit       = sa_sha_cra_exit,
2136                        },
2137                        .halg.digestsize        = SHA512_DIGEST_SIZE,
2138                        .halg.statesize         = sizeof(struct sa_sha_req_ctx) +
2139                                                  sizeof(struct sha512_state),
2140                        .init                   = sa_sha_init,
2141                        .update                 = sa_sha_update,
2142                        .final                  = sa_sha_final,
2143                        .finup                  = sa_sha_finup,
2144                        .digest                 = sa_sha_digest,
2145                        .export                 = sa_sha_export,
2146                        .import                 = sa_sha_import,
2147                },
2148        },
2149        [SA_ALG_AUTHENC_SHA1_AES] = {
2150                .type   = CRYPTO_ALG_TYPE_AEAD,
2151                .alg.aead = {
2152                        .base = {
2153                                .cra_name = "authenc(hmac(sha1),cbc(aes))",
2154                                .cra_driver_name =
2155                                        "authenc(hmac(sha1),cbc(aes))-sa2ul",
2156                                .cra_blocksize = AES_BLOCK_SIZE,
2157                                .cra_flags = CRYPTO_ALG_TYPE_AEAD |
2158                                        CRYPTO_ALG_KERN_DRIVER_ONLY |
2159                                        CRYPTO_ALG_ASYNC |
2160                                        CRYPTO_ALG_NEED_FALLBACK,
2161                                .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2162                                .cra_module = THIS_MODULE,
2163                                .cra_priority = 3000,
2164                        },
2165                        .ivsize = AES_BLOCK_SIZE,
2166                        .maxauthsize = SHA1_DIGEST_SIZE,
2167
2168                        .init = sa_cra_init_aead_sha1,
2169                        .exit = sa_exit_tfm_aead,
2170                        .setkey = sa_aead_cbc_sha1_setkey,
2171                        .setauthsize = sa_aead_setauthsize,
2172                        .encrypt = sa_aead_encrypt,
2173                        .decrypt = sa_aead_decrypt,
2174                },
2175        },
2176        [SA_ALG_AUTHENC_SHA256_AES] = {
2177                .type   = CRYPTO_ALG_TYPE_AEAD,
2178                .alg.aead = {
2179                        .base = {
2180                                .cra_name = "authenc(hmac(sha256),cbc(aes))",
2181                                .cra_driver_name =
2182                                        "authenc(hmac(sha256),cbc(aes))-sa2ul",
2183                                .cra_blocksize = AES_BLOCK_SIZE,
2184                                .cra_flags = CRYPTO_ALG_TYPE_AEAD |
2185                                        CRYPTO_ALG_KERN_DRIVER_ONLY |
2186                                        CRYPTO_ALG_ASYNC |
2187                                        CRYPTO_ALG_NEED_FALLBACK,
2188                                .cra_ctxsize = sizeof(struct sa_tfm_ctx),
2189                                .cra_module = THIS_MODULE,
2191                                .cra_priority = 3000,
2192                        },
2193                        .ivsize = AES_BLOCK_SIZE,
2194                        .maxauthsize = SHA256_DIGEST_SIZE,
2195
2196                        .init = sa_cra_init_aead_sha256,
2197                        .exit = sa_exit_tfm_aead,
2198                        .setkey = sa_aead_cbc_sha256_setkey,
2199                        .setauthsize = sa_aead_setauthsize,
2200                        .encrypt = sa_aead_encrypt,
2201                        .decrypt = sa_aead_decrypt,
2202                },
2203        },
2204};
2205
2206/* Register the algorithms in crypto framework */
2207static void sa_register_algos(struct sa_crypto_data *dev_data)
2208{
2209        const struct sa_match_data *match_data = dev_data->match_data;
2210        struct device *dev = dev_data->dev;
2211        char *alg_name;
2212        u32 type;
2213        int i, err;
2214
2215        for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
2216                /* Skip unsupported algos */
2217                if (!(match_data->supported_algos & BIT(i)))
2218                        continue;
2219
2220                type = sa_algs[i].type;
2221                if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
2222                        alg_name = sa_algs[i].alg.skcipher.base.cra_name;
2223                        err = crypto_register_skcipher(&sa_algs[i].alg.skcipher);
2224                } else if (type == CRYPTO_ALG_TYPE_AHASH) {
2225                        alg_name = sa_algs[i].alg.ahash.halg.base.cra_name;
2226                        err = crypto_register_ahash(&sa_algs[i].alg.ahash);
2227                } else if (type == CRYPTO_ALG_TYPE_AEAD) {
2228                        alg_name = sa_algs[i].alg.aead.base.cra_name;
2229                        err = crypto_register_aead(&sa_algs[i].alg.aead);
2230                } else {
2231                        dev_err(dev,
2232                                "unsupported crypto algorithm (%d)\n",
2233                                sa_algs[i].type);
2234                        continue;
2235                }
2236
2237                if (err)
2238                        dev_err(dev, "Failed to register '%s'\n", alg_name);
2239                else
2240                        sa_algs[i].registered = true;
2241        }
2242}
2243
2244/* Unregister the algorithms in crypto framework */
2245static void sa_unregister_algos(const struct device *dev)
2246{
2247        u32 type;
2248        int i;
2249
2250        for (i = 0; i < ARRAY_SIZE(sa_algs); i++) {
2251                type = sa_algs[i].type;
2252                if (!sa_algs[i].registered)
2253                        continue;
2254                if (type == CRYPTO_ALG_TYPE_SKCIPHER)
2255                        crypto_unregister_skcipher(&sa_algs[i].alg.skcipher);
2256                else if (type == CRYPTO_ALG_TYPE_AHASH)
2257                        crypto_unregister_ahash(&sa_algs[i].alg.ahash);
2258                else if (type == CRYPTO_ALG_TYPE_AEAD)
2259                        crypto_unregister_aead(&sa_algs[i].alg.aead);
2260
2261                sa_algs[i].registered = false;
2262        }
2263}
2264
2265static int sa_init_mem(struct sa_crypto_data *dev_data)
2266{
2267        struct device *dev = &dev_data->pdev->dev;
2268        /* Setup dma pool for security context buffers */
2269        dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
2270                                            SA_CTX_MAX_SZ, 64, 0);
2271        if (!dev_data->sc_pool) {
2272                dev_err(dev, "Failed to create dma pool\n");
2273                return -ENOMEM;
2274        }
2275
2276        return 0;
2277}
2278
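    /*
     * Acquire the DMA channels used to talk to the engine: one tx channel
     * for submissions and two rx channels for responses, all configured
     * for 4-byte, burst-of-4 transfers. The 48-bit DMA mask matches the
     * addressing width of the K3 DMA fabric.
     */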
2279static int sa_dma_init(struct sa_crypto_data *dd)
2280{
2281        int ret;
2282        struct dma_slave_config cfg;
2283
2284        dd->dma_rx1 = NULL;
2285        dd->dma_tx = NULL;
2286        dd->dma_rx2 = NULL;
2287
2288        ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
2289        if (ret)
2290                return ret;
2291
2292        dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
2293        if (IS_ERR(dd->dma_rx1))
2294                return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
2295                                     "Unable to request rx1 DMA channel\n");
2296
2297        dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
2298        if (IS_ERR(dd->dma_rx2)) {
2299                ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
2300                                    "Unable to request rx2 DMA channel\n");
2301                goto err_dma_rx2;
2302        }
2303
2304        dd->dma_tx = dma_request_chan(dd->dev, "tx");
2305        if (IS_ERR(dd->dma_tx)) {
2306                ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
2307                                    "Unable to request tx DMA channel\n");
2308                goto err_dma_tx;
2309        }
2310
2311        memzero_explicit(&cfg, sizeof(cfg));
2312
2313        cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2314        cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
2315        cfg.src_maxburst = 4;
2316        cfg.dst_maxburst = 4;
2317
2318        ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
2319        if (ret) {
2320                dev_err(dd->dev, "can't configure rx1 dmaengine slave: %d\n",
2321                        ret);
2322                goto err_dma_config;
2323        }
2324
2325        ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
2326        if (ret) {
2327                dev_err(dd->dev, "can't configure rx2 dmaengine slave: %d\n",
2328                        ret);
2329                goto err_dma_config;
2330        }
2331
2332        ret = dmaengine_slave_config(dd->dma_tx, &cfg);
2333        if (ret) {
2334                dev_err(dd->dev, "can't configure tx dmaengine slave: %d\n",
2335                        ret);
2336                goto err_dma_config;
2337        }
2338
2339        return 0;
2340
2341err_dma_config:
2342        dma_release_channel(dd->dma_tx);
2343err_dma_tx:
2344        dma_release_channel(dd->dma_rx2);
2345err_dma_rx2:
2346        dma_release_channel(dd->dma_rx1);
2347
2348        return ret;
2349}
2350
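    /*
     * Link each child device populated from the SA2UL node (for example
     * the TRNG) to the parent so that the driver core orders its probe
     * and power management after the accelerator itself.
     */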
2351static int sa_link_child(struct device *dev, void *data)
2352{
2353        struct device *parent = data;
2354
2355        device_link_add(dev, parent, DL_FLAG_AUTOPROBE_CONSUMER);
2356
2357        return 0;
2358}
2359
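    /*
     * Per-SoC capabilities. AM65x/J721E expose every algorithm in
     * sa_algs, while AM64x advertises a reduced set and skips the
     * engine-enable register write, presumably because the engine is
     * already set up by system firmware on that SoC.
     */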
2360static struct sa_match_data am654_match_data = {
2361        .priv = 1,
2362        .priv_id = 1,
2363        .supported_algos = GENMASK(SA_ALG_AUTHENC_SHA256_AES, 0),
2364};
2365
2366static struct sa_match_data am64_match_data = {
2367        .priv = 0,
2368        .priv_id = 0,
2369        .supported_algos = BIT(SA_ALG_CBC_AES) |
2370                           BIT(SA_ALG_EBC_AES) |
2371                           BIT(SA_ALG_SHA256) |
2372                           BIT(SA_ALG_SHA512) |
2373                           BIT(SA_ALG_AUTHENC_SHA256_AES),
2374        .skip_engine_control = true,
2375};
2376
2377static const struct of_device_id of_match[] = {
2378        { .compatible = "ti,j721e-sa2ul", .data = &am654_match_data, },
2379        { .compatible = "ti,am654-sa2ul", .data = &am654_match_data, },
2380        { .compatible = "ti,am64-sa2ul", .data = &am64_match_data, },
2381        {},
2382};
2383MODULE_DEVICE_TABLE(of, of_match);
2384
2385static int sa_ul_probe(struct platform_device *pdev)
2386{
2387        struct device *dev = &pdev->dev;
2388        struct device_node *node = dev->of_node;
2389        void __iomem *saul_base;
2390        struct sa_crypto_data *dev_data;
2391        int ret;
2392
2393        dev_data = devm_kzalloc(dev, sizeof(*dev_data), GFP_KERNEL);
2394        if (!dev_data)
2395                return -ENOMEM;
2396
2397        dev_data->match_data = of_device_get_match_data(dev);
2398        if (!dev_data->match_data)
2399                return -ENODEV;
2400
2401        saul_base = devm_platform_ioremap_resource(pdev, 0);
2402        if (IS_ERR(saul_base))
2403                return PTR_ERR(saul_base);
2404
2405        sa_k3_dev = dev;
2406        dev_data->dev = dev;
2407        dev_data->pdev = pdev;
2408        dev_data->base = saul_base;
2409        platform_set_drvdata(pdev, dev_data);
2410        dev_set_drvdata(sa_k3_dev, dev_data);
2411
2412        pm_runtime_enable(dev);
2413        ret = pm_runtime_resume_and_get(dev);
2414        if (ret < 0) {
2415                dev_err(&pdev->dev, "%s: failed to get sync: %d\n", __func__,
2416                        ret);
2417                pm_runtime_disable(dev);
2418                return ret;
2419        }
2420
2421        ret = sa_init_mem(dev_data);
            if (ret)
                    goto destroy_dma_pool;

2422        ret = sa_dma_init(dev_data);
2423        if (ret)
2424                goto destroy_dma_pool;
2425
2426        spin_lock_init(&dev_data->scid_lock);
2427
2428        if (!dev_data->match_data->skip_engine_control) {
2429                u32 val = SA_EEC_ENCSS_EN | SA_EEC_AUTHSS_EN | SA_EEC_CTXCACH_EN |
2430                          SA_EEC_CPPI_PORT_IN_EN | SA_EEC_CPPI_PORT_OUT_EN |
2431                          SA_EEC_TRNG_EN;
2432
2433                writel_relaxed(val, saul_base + SA_ENGINE_ENABLE_CONTROL);
2434        }
2435
2436        sa_register_algos(dev_data);
2437
2438        ret = of_platform_populate(node, NULL, NULL, &pdev->dev);
2439        if (ret)
2440                goto release_dma;
2441
2442        device_for_each_child(&pdev->dev, &pdev->dev, sa_link_child);
2443
2444        return 0;
2445
2446release_dma:
2447        sa_unregister_algos(&pdev->dev);
2448
2449        dma_release_channel(dev_data->dma_rx2);
2450        dma_release_channel(dev_data->dma_rx1);
2451        dma_release_channel(dev_data->dma_tx);
2452
2453destroy_dma_pool:
2454        dma_pool_destroy(dev_data->sc_pool);
2455
2456        pm_runtime_put_sync(&pdev->dev);
2457        pm_runtime_disable(&pdev->dev);
2458
2459        return ret;
2460}
2461
2462static int sa_ul_remove(struct platform_device *pdev)
2463{
2464        struct sa_crypto_data *dev_data = platform_get_drvdata(pdev);
2465
2466        of_platform_depopulate(&pdev->dev);
2467
2468        sa_unregister_algos(&pdev->dev);
2469
2470        dma_release_channel(dev_data->dma_rx2);
2471        dma_release_channel(dev_data->dma_rx1);
2472        dma_release_channel(dev_data->dma_tx);
2473
2474        dma_pool_destroy(dev_data->sc_pool);
2475
2476        platform_set_drvdata(pdev, NULL);
2477
2478        pm_runtime_put_sync(&pdev->dev);
2479        pm_runtime_disable(&pdev->dev);
2480
2481        return 0;
2482}
2483
2484static struct platform_driver sa_ul_driver = {
2485        .probe = sa_ul_probe,
2486        .remove = sa_ul_remove,
2487        .driver = {
2488                   .name = "saul-crypto",
2489                   .of_match_table = of_match,
2490                   },
2491};
2492module_platform_driver(sa_ul_driver);
2493MODULE_LICENSE("GPL v2");
    MODULE_DESCRIPTION("TI SA2UL crypto accelerator driver");
2494