linux/drivers/dma/stm32-mdma.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

/* MDMA Generic getter/setter */
#define STM32_MDMA_SHIFT(n)             (ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)         (((n) << STM32_MDMA_SHIFT(mask)) & \
                                         (mask))
#define STM32_MDMA_GET(n, mask)         (((n) & (mask)) >> \
                                         STM32_MDMA_SHIFT(mask))
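/*
 * For instance, with STM32_MDMA_CCR_PL_MASK = GENMASK(7, 6) the shift is 6,
 * so STM32_MDMA_SET(2, mask) yields (2 << 6) & 0xC0 = 0x80 and
 * STM32_MDMA_GET(0x80, mask) recovers 2.
 */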

#define STM32_MDMA_GISR0                0x0000 /* MDMA Int Status Reg, ch 0..31 */
#define STM32_MDMA_GISR1                0x0004 /* MDMA Int Status Reg, ch 32..62 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)              (0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA            BIT(16)
#define STM32_MDMA_CISR_TCIF            BIT(4)
#define STM32_MDMA_CISR_BTIF            BIT(3)
#define STM32_MDMA_CISR_BRTIF           BIT(2)
#define STM32_MDMA_CISR_CTCIF           BIT(1)
#define STM32_MDMA_CISR_TEIF            BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)             (0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF         BIT(4)
#define STM32_MDMA_CIFCR_CBTIF          BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF         BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF         BIT(1)
#define STM32_MDMA_CIFCR_CTEIF          BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL      (STM32_MDMA_CIFCR_CLTCIF \
                                        | STM32_MDMA_CIFCR_CBTIF \
                                        | STM32_MDMA_CIFCR_CBRTIF \
                                        | STM32_MDMA_CIFCR_CCTCIF \
                                        | STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)              (0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE             BIT(11)
#define STM32_MDMA_CESR_ASR             BIT(10)
#define STM32_MDMA_CESR_TEMD            BIT(9)
#define STM32_MDMA_CESR_TELD            BIT(8)
#define STM32_MDMA_CESR_TED             BIT(7)
#define STM32_MDMA_CESR_TEA_MASK        GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)               (0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ             BIT(16)
#define STM32_MDMA_CCR_WEX              BIT(14)
#define STM32_MDMA_CCR_HEX              BIT(13)
#define STM32_MDMA_CCR_BEX              BIT(12)
#define STM32_MDMA_CCR_PL_MASK          GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)            STM32_MDMA_SET(n, \
                                                       STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE             BIT(5)
#define STM32_MDMA_CCR_BTIE             BIT(4)
#define STM32_MDMA_CCR_BRTIE            BIT(3)
#define STM32_MDMA_CCR_CTCIE            BIT(2)
#define STM32_MDMA_CCR_TEIE             BIT(1)
#define STM32_MDMA_CCR_EN               BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK         (STM32_MDMA_CCR_TCIE \
                                        | STM32_MDMA_CCR_BTIE \
                                        | STM32_MDMA_CCR_BRTIE \
                                        | STM32_MDMA_CCR_CTCIE \
                                        | STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)              (0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM             BIT(31)
#define STM32_MDMA_CTCR_SWRM            BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK        GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)         STM32_MDMA_SET((n), \
                                                       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)     STM32_MDMA_GET((n), \
                                                       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK        GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)          STM32_MDMA_SET(n, \
                                                       STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE             BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK        GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)         STM32_MDMA_SET((n), \
                                                       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)     STM32_MDMA_GET((n), \
                                                       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK        GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)         STM32_MDMA_SET((n), \
                                                       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)     STM32_MDMA_GET((n), \
                                                       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK     GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)       STM32_MDMA_SET(n, \
                                                    STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK     GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)       STM32_MDMA_SET(n, \
                                                    STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK     GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)       STM32_MDMA_SET((n), \
                                                    STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK     GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)       STM32_MDMA_SET((n), \
                                                    STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK      GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)        STM32_MDMA_SET(n, \
                                                     STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK      GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)        STM32_MDMA_SET(n, \
                                                     STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK       GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)         STM32_MDMA_SET((n), \
                                                      STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK       GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)         STM32_MDMA_SET((n), \
                                                      STM32_MDMA_CTCR_SINC_MASK)
#define STM32_MDMA_CTCR_CFG_MASK        (STM32_MDMA_CTCR_SINC_MASK \
                                        | STM32_MDMA_CTCR_DINC_MASK \
                                        | STM32_MDMA_CTCR_SINCOS_MASK \
                                        | STM32_MDMA_CTCR_DINCOS_MASK \
                                        | STM32_MDMA_CTCR_LEN2_MSK \
                                        | STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)            (0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK        GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)        STM32_MDMA_SET(n, \
                                                       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)    STM32_MDMA_GET((n), \
                                                       STM32_MDMA_CBNDTR_BRC_MK)

#define STM32_MDMA_CBNDTR_BRDUM         BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM         BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK     GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)       STM32_MDMA_SET(n, \
                                                    STM32_MDMA_CBNDTR_BNDT_MASK)

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)              (0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)              (0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)             (0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK       GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)         STM32_MDMA_SET(n, \
                                                      STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK       GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)         STM32_MDMA_SET(n, \
                                                      STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)              (0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)              (0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS            BIT(17)
#define STM32_MDMA_CTBR_SBUS            BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK       GENMASK(7, 0)
#define STM32_MDMA_CTBR_TSEL(n)         STM32_MDMA_SET(n, \
                                                      STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)              (0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)              (0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN          128
#define STM32_MDMA_MAX_BLOCK_LEN        65536
#define STM32_MDMA_MAX_CHANNELS         63
#define STM32_MDMA_MAX_REQUESTS         256
#define STM32_MDMA_MAX_BURST            128
#define STM32_MDMA_VERY_HIGH_PRIORITY   0x11

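/*
 * Trigger granularity (TRGM, per the STM32 reference manual): each hardware
 * or software request moves one buffer (up to TLEN bytes), one block, a
 * whole repeated-block sequence, or runs the full linked list.
 */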
enum stm32_mdma_trigger_mode {
        STM32_MDMA_BUFFER,
        STM32_MDMA_BLOCK,
        STM32_MDMA_BLOCK_REP,
        STM32_MDMA_LINKED_LIST,
};

enum stm32_mdma_width {
        STM32_MDMA_BYTE,
        STM32_MDMA_HALF_WORD,
        STM32_MDMA_WORD,
        STM32_MDMA_DOUBLE_WORD,
};

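/*
 * SINC/DINC field encoding in CTCR (per the reference manual): 0b00 = fixed,
 * 0b10 = increment, 0b11 = decrement; 0b01 is reserved, hence the hole at
 * value 1 below.
 */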
enum stm32_mdma_inc_mode {
        STM32_MDMA_FIXED = 0,
        STM32_MDMA_INC = 2,
        STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
        u32 request;
        u32 priority_level;
        u32 transfer_config;
        u32 mask_addr;
        u32 mask_data;
};

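/*
 * Hardware descriptor fetched by the controller in linked-list mode: the ten
 * words mirror the CTCR..CMDR channel register block (the dummy word covers
 * the reserved offset between CTBR and CMAR), so the layout and 64-byte
 * alignment must match what the hardware expects.
 */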
struct stm32_mdma_hwdesc {
        u32 ctcr;
        u32 cbndtr;
        u32 csar;
        u32 cdar;
        u32 cbrur;
        u32 clar;
        u32 ctbr;
        u32 dummy;
        u32 cmar;
        u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
        struct stm32_mdma_hwdesc *hwdesc;
        dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
        struct virt_dma_desc vdesc;
        u32 ccr;
        bool cyclic;
        u32 count;
        struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
        struct virt_dma_chan vchan;
        struct dma_pool *desc_pool;
        u32 id;
        struct stm32_mdma_desc *desc;
        u32 curr_hwdesc;
        struct dma_slave_config dma_config;
        struct stm32_mdma_chan_config chan_config;
        bool busy;
        u32 mem_burst;
        u32 mem_width;
};

struct stm32_mdma_device {
        struct dma_device ddev;
        void __iomem *base;
        struct clk *clk;
        int irq;
        struct reset_control *rst;
        u32 nr_channels;
        u32 nr_requests;
        u32 nr_ahb_addr_masks;
        struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
        u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
        struct stm32_mdma_chan *chan)
{
        return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
                            ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
        return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
        return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
        return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
        return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
        return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
        writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
                                u32 mask)
{
        void __iomem *addr = dmadev->base + reg;

        writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
                                u32 mask)
{
        void __iomem *addr = dmadev->base + reg;

        writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
                struct stm32_mdma_chan *chan, u32 count)
{
        struct stm32_mdma_desc *desc;
        int i;

        desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
        if (!desc)
                return NULL;

        for (i = 0; i < count; i++) {
                desc->node[i].hwdesc =
                        dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
                                       &desc->node[i].hwdesc_phys);
                if (!desc->node[i].hwdesc)
                        goto err;
        }

        desc->count = count;

        return desc;

err:
        dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
        while (--i >= 0)
                dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
                              desc->node[i].hwdesc_phys);
        kfree(desc);
        return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
        struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
        int i;

        for (i = 0; i < desc->count; i++)
                dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
                              desc->node[i].hwdesc_phys);
        kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
                                enum dma_slave_buswidth width)
{
        switch (width) {
        case DMA_SLAVE_BUSWIDTH_1_BYTE:
        case DMA_SLAVE_BUSWIDTH_2_BYTES:
        case DMA_SLAVE_BUSWIDTH_4_BYTES:
        case DMA_SLAVE_BUSWIDTH_8_BYTES:
                return ffs(width) - 1;
        default:
                dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
                        width);
                return -EINVAL;
        }
}

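/*
 * Pick the widest bus width (8 bytes down to 1) on which both the address
 * and the buffer length are aligned, capped by the buffer transfer length.
 * For example, addr = 0x1004, buf_len = 64, tlen = 128 gives 4 bytes: both
 * values are 4-byte but not 8-byte aligned.
 */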
static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
                                                        u32 buf_len, u32 tlen)
{
        enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

        for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
             max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
             max_width >>= 1) {
                /*
                 * Address and buffer length both have to be aligned on
                 * bus width
                 */
                if ((((buf_len | addr) & (max_width - 1)) == 0) &&
                    tlen >= max_width)
                        break;
        }

        return max_width;
}

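/*
 * Largest power-of-two burst (in beats) that evenly fits both the buffer
 * and the transfer length without exceeding max_burst. For example,
 * buf_len = 96, tlen = 128, max_burst = 8, width = 4: __ffs(96 | 128) = 5,
 * so min(1 << 5, 8 * 4) / 4 = 8 beats.
 */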
static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
                                     enum dma_slave_buswidth width)
{
        u32 best_burst;

        best_burst = min((u32)1 << __ffs(tlen | buf_len),
                         max_burst * width) / width;

        return (best_burst > 0) ? best_burst : 1;
}

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        u32 ccr, cisr, id, reg;
        int ret;

        id = chan->id;
        reg = STM32_MDMA_CCR(id);

        /* Disable interrupts */
        stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

        ccr = stm32_mdma_read(dmadev, reg);
        if (ccr & STM32_MDMA_CCR_EN) {
                stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

                /* Ensure that any ongoing transfer has been completed */
                ret = readl_relaxed_poll_timeout_atomic(
                                dmadev->base + STM32_MDMA_CISR(id), cisr,
                                (cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
                if (ret) {
                        dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
                        return -EBUSY;
                }
        }

        return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        u32 status;
        int ret;

        /* Disable DMA */
        ret = stm32_mdma_disable_chan(chan);
        if (ret < 0)
                return;

        /* Clear interrupt status if it is there */
        status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
        if (status) {
                dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
                        __func__, status);
                stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
        }

        chan->busy = false;
}

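/*
 * ahb_addr_masks is filled from the "st,ahb-addr-masks" device tree
 * property: an address whose top nibble matches one of the masks belongs to
 * a device reachable through the AHB bus, everything else is accessed
 * through AXI.
 */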
static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
                               u32 ctbr_mask, u32 src_addr)
{
        u32 mask;
        int i;

        /* Check if memory device is on AHB or AXI */
        *ctbr &= ~ctbr_mask;
        mask = src_addr & 0xF0000000;
        for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
                if (mask == dmadev->ahb_addr_masks[i]) {
                        *ctbr |= ctbr_mask;
                        break;
                }
        }
}

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
                                     enum dma_transfer_direction direction,
                                     u32 *mdma_ccr, u32 *mdma_ctcr,
                                     u32 *mdma_ctbr, dma_addr_t addr,
                                     u32 buf_len)
{
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
        enum dma_slave_buswidth src_addr_width, dst_addr_width;
        phys_addr_t src_addr, dst_addr;
        int src_bus_width, dst_bus_width;
        u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
        u32 ccr, ctcr, ctbr, tlen;

        src_addr_width = chan->dma_config.src_addr_width;
        dst_addr_width = chan->dma_config.dst_addr_width;
        src_maxburst = chan->dma_config.src_maxburst;
        dst_maxburst = chan->dma_config.dst_maxburst;

        ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
        ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
        ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

        /* Enable HW request mode */
        ctcr &= ~STM32_MDMA_CTCR_SWRM;

        /* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
        ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
        ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

        /*
         * The buffer transfer length (TLEN) has to be programmed as
         * the number of bytes - 1 in the CTCR register
         */
        tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
        ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
        ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

        /* Disable Pack Enable */
        ctcr &= ~STM32_MDMA_CTCR_PKE;

        /* Check burst size constraints */
        if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
            dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
                dev_err(chan2dev(chan),
                        "burst size * bus width higher than %d bytes\n",
                        STM32_MDMA_MAX_BURST);
                return -EINVAL;
        }

        if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
            (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
                dev_err(chan2dev(chan), "burst size must be a power of 2\n");
                return -EINVAL;
        }

        /*
         * Configure channel control:
         * - Clear SW request as in this case this is a HW one
         * - Clear WEX, HEX and BEX bits
         * - Set priority level
         */
        ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
                 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
        ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

        /* Configure Trigger selection */
        ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
        ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

        switch (direction) {
        case DMA_MEM_TO_DEV:
                dst_addr = chan->dma_config.dst_addr;

                /* Set device data size */
                dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
                if (dst_bus_width < 0)
                        return dst_bus_width;
                ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
                ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

                /* Set device burst value */
                dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
                                                           dst_maxburst,
                                                           dst_addr_width);
                chan->mem_burst = dst_best_burst;
                ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
                ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

                /* Set memory data size */
                src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
                chan->mem_width = src_addr_width;
                src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
                if (src_bus_width < 0)
                        return src_bus_width;
                ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
                          STM32_MDMA_CTCR_SINCOS_MASK);
                ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
                        STM32_MDMA_CTCR_SINCOS(src_bus_width);

                /* Set memory burst value */
                src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
                src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
                                                           src_maxburst,
                                                           src_addr_width);
                chan->mem_burst = src_best_burst;
                ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
                ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

                /* Select bus */
                stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
                                   dst_addr);

                if (dst_bus_width != src_bus_width)
                        ctcr |= STM32_MDMA_CTCR_PKE;

                /* Set destination address */
                stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
                break;

        case DMA_DEV_TO_MEM:
                src_addr = chan->dma_config.src_addr;

                /* Set device data size */
                src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
                if (src_bus_width < 0)
                        return src_bus_width;
                ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
                ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

                /* Set device burst value */
                src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
                                                           src_maxburst,
                                                           src_addr_width);
                ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
                ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

                /* Set memory data size */
                dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
                chan->mem_width = dst_addr_width;
                dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
                if (dst_bus_width < 0)
                        return dst_bus_width;
                ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
                        STM32_MDMA_CTCR_DINCOS_MASK);
                ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
                        STM32_MDMA_CTCR_DINCOS(dst_bus_width);

                /* Set memory burst value */
                dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
                dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
                                                           dst_maxburst,
                                                           dst_addr_width);
                ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
                ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

                /* Select bus */
                stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
                                   src_addr);

                if (dst_bus_width != src_bus_width)
                        ctcr |= STM32_MDMA_CTCR_PKE;

                /* Set source address */
                stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
                break;

        default:
                dev_err(chan2dev(chan), "Dma direction is not supported\n");
                return -EINVAL;
        }

        *mdma_ccr = ccr;
        *mdma_ctcr = ctcr;
        *mdma_ctbr = ctbr;

        return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
                                   struct stm32_mdma_desc_node *node)
{
        dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
        dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
        dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
        dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
        dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
        dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
        dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
        dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
        dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
        dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
}

static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
                                    struct stm32_mdma_desc *desc,
                                    enum dma_transfer_direction dir, u32 count,
                                    dma_addr_t src_addr, dma_addr_t dst_addr,
                                    u32 len, u32 ctcr, u32 ctbr, bool is_last,
                                    bool is_first, bool is_cyclic)
{
        struct stm32_mdma_chan_config *config = &chan->chan_config;
        struct stm32_mdma_hwdesc *hwdesc;
        u32 next = count + 1;

        hwdesc = desc->node[count].hwdesc;
        hwdesc->ctcr = ctcr;
        hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
                        STM32_MDMA_CBNDTR_BRDUM |
                        STM32_MDMA_CBNDTR_BRSUM |
                        STM32_MDMA_CBNDTR_BNDT_MASK);
        hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
        hwdesc->csar = src_addr;
        hwdesc->cdar = dst_addr;
        hwdesc->cbrur = 0;
        hwdesc->ctbr = ctbr;
        hwdesc->cmar = config->mask_addr;
        hwdesc->cmdr = config->mask_data;

        if (is_last) {
                if (is_cyclic)
                        hwdesc->clar = desc->node[0].hwdesc_phys;
                else
                        hwdesc->clar = 0;
        } else {
                hwdesc->clar = desc->node[next].hwdesc_phys;
        }

        stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
                                 struct stm32_mdma_desc *desc,
                                 struct scatterlist *sgl, u32 sg_len,
                                 enum dma_transfer_direction direction)
{
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        struct dma_slave_config *dma_config = &chan->dma_config;
        struct scatterlist *sg;
        dma_addr_t src_addr, dst_addr;
        u32 ccr, ctcr, ctbr;
        int i, ret = 0;

        for_each_sg(sgl, sg, sg_len, i) {
                if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
                        dev_err(chan2dev(chan), "Invalid block len\n");
                        return -EINVAL;
                }

                if (direction == DMA_MEM_TO_DEV) {
                        src_addr = sg_dma_address(sg);
                        dst_addr = dma_config->dst_addr;
                        ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
                                                        &ctcr, &ctbr, src_addr,
                                                        sg_dma_len(sg));
                        stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
                                           src_addr);
                } else {
                        src_addr = dma_config->src_addr;
                        dst_addr = sg_dma_address(sg);
                        ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
                                                        &ctcr, &ctbr, dst_addr,
                                                        sg_dma_len(sg));
                        stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
                                           dst_addr);
                }

                if (ret < 0)
                        return ret;

                stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
                                        dst_addr, sg_dma_len(sg), ctcr, ctbr,
                                        i == sg_len - 1, i == 0, false);
        }

        /* Enable interrupts */
        ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
        ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
        if (sg_len > 1)
                ccr |= STM32_MDMA_CCR_BTIE;
        desc->ccr = ccr;

        return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
                         u32 sg_len, enum dma_transfer_direction direction,
                         unsigned long flags, void *context)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        struct stm32_mdma_desc *desc;
        int i, ret;

        /*
         * Once the channel has been set up in cyclic mode, it cannot be
         * reassigned: the DMA channel must be aborted or terminated before
         * a new request can be issued.
         */
        if (chan->desc && chan->desc->cyclic) {
                dev_err(chan2dev(chan),
                        "Request not allowed when dma in cyclic mode\n");
                return NULL;
        }

        desc = stm32_mdma_alloc_desc(chan, sg_len);
        if (!desc)
                return NULL;

        ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
        if (ret < 0)
                goto xfer_setup_err;

        desc->cyclic = false;

        return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
        for (i = 0; i < desc->count; i++)
                dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
                              desc->node[i].hwdesc_phys);
        kfree(desc);
        return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
                           size_t buf_len, size_t period_len,
                           enum dma_transfer_direction direction,
                           unsigned long flags)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        struct dma_slave_config *dma_config = &chan->dma_config;
        struct stm32_mdma_desc *desc;
        dma_addr_t src_addr, dst_addr;
        u32 ccr, ctcr, ctbr, count;
        int i, ret;

        /*
         * Once the channel has been set up in cyclic mode, it cannot be
         * reassigned: the DMA channel must be aborted or terminated before
         * a new request can be issued.
         */
        if (chan->desc && chan->desc->cyclic) {
                dev_err(chan2dev(chan),
                        "Request not allowed when dma in cyclic mode\n");
                return NULL;
        }

        if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
                dev_err(chan2dev(chan), "Invalid buffer/period len\n");
                return NULL;
        }

        if (buf_len % period_len) {
                dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
                return NULL;
        }

        count = buf_len / period_len;

        desc = stm32_mdma_alloc_desc(chan, count);
        if (!desc)
                return NULL;

        /* Select bus */
        if (direction == DMA_MEM_TO_DEV) {
                src_addr = buf_addr;
                ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
                                                &ctbr, src_addr, period_len);
                stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
                                   src_addr);
        } else {
                dst_addr = buf_addr;
                ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
                                                &ctbr, dst_addr, period_len);
                stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
                                   dst_addr);
        }

        if (ret < 0)
                goto xfer_setup_err;

        /* Enable interrupts */
        ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
        ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
        desc->ccr = ccr;

        /* Configure hwdesc list */
        for (i = 0; i < count; i++) {
                if (direction == DMA_MEM_TO_DEV) {
                        src_addr = buf_addr + i * period_len;
                        dst_addr = dma_config->dst_addr;
                } else {
                        src_addr = dma_config->src_addr;
                        dst_addr = buf_addr + i * period_len;
                }

                stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
                                        dst_addr, period_len, ctcr, ctbr,
                                        i == count - 1, i == 0, true);
        }

        desc->cyclic = true;

        return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
        for (i = 0; i < desc->count; i++)
                dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
                              desc->node[i].hwdesc_phys);
        kfree(desc);
        return NULL;
}

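/*
 * memcpy picks the cheapest trigger mode for the requested size: a single
 * buffer transfer up to 128 bytes, a single block transfer up to 64 KiB,
 * and a linked list of 64 KiB blocks beyond that.
 */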
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
                           size_t len, unsigned long flags)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        enum dma_slave_buswidth max_width;
        struct stm32_mdma_desc *desc;
        struct stm32_mdma_hwdesc *hwdesc;
        u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
        u32 best_burst, tlen;
        size_t xfer_count, offset;
        int src_bus_width, dst_bus_width;
        int i;

        /*
         * Once the channel has been set up in cyclic mode, it cannot be
         * reassigned: the DMA channel must be aborted or terminated before
         * a new request can be issued.
         */
        if (chan->desc && chan->desc->cyclic) {
                dev_err(chan2dev(chan),
                        "Request not allowed when dma in cyclic mode\n");
                return NULL;
        }

        count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
        desc = stm32_mdma_alloc_desc(chan, count);
        if (!desc)
                return NULL;

        ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
        ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
        ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
        cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

        /* Enable sw req, some interrupts and clear other bits */
        ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
                 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
                 STM32_MDMA_CCR_IRQ_MASK);
        ccr |= STM32_MDMA_CCR_TEIE;

        /* Enable SW request mode, dest/src inc and clear other bits */
        ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
                  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
                  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
                  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
                  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
                  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
                  STM32_MDMA_CTCR_SINC_MASK);
        ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
                STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

        /* Reset HW request */
        ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

        /* Select bus */
        stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
        stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

        /* Clear CBNDTR registers */
        cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
                        STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

        if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
                cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
                if (len <= STM32_MDMA_MAX_BUF_LEN) {
                        /* Setup a buffer transfer */
                        ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
                        ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
                } else {
                        /* Setup a block transfer */
                        ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
                        ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
                }

                tlen = STM32_MDMA_MAX_BUF_LEN;
                ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

                /* Set source best burst size */
                max_width = stm32_mdma_get_max_width(src, len, tlen);
                src_bus_width = stm32_mdma_get_width(chan, max_width);

                max_burst = tlen / max_width;
                best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
                                                       max_width);
                mdma_burst = ilog2(best_burst);

                ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
                        STM32_MDMA_CTCR_SSIZE(src_bus_width) |
                        STM32_MDMA_CTCR_SINCOS(src_bus_width);

                /* Set destination best burst size */
                max_width = stm32_mdma_get_max_width(dest, len, tlen);
                dst_bus_width = stm32_mdma_get_width(chan, max_width);

                max_burst = tlen / max_width;
                best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
                                                       max_width);
                mdma_burst = ilog2(best_burst);

                ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
                        STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
                        STM32_MDMA_CTCR_DINCOS(dst_bus_width);

                if (dst_bus_width != src_bus_width)
                        ctcr |= STM32_MDMA_CTCR_PKE;

                /* Prepare hardware descriptor */
                hwdesc = desc->node[0].hwdesc;
                hwdesc->ctcr = ctcr;
                hwdesc->cbndtr = cbndtr;
                hwdesc->csar = src;
                hwdesc->cdar = dest;
                hwdesc->cbrur = 0;
                hwdesc->clar = 0;
                hwdesc->ctbr = ctbr;
                hwdesc->cmar = 0;
                hwdesc->cmdr = 0;

                stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
        } else {
                /* Setup a LLI transfer */
                ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
                        STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
                ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
                tlen = STM32_MDMA_MAX_BUF_LEN;

                for (i = 0, offset = 0; offset < len;
                     i++, offset += xfer_count) {
                        xfer_count = min_t(size_t, len - offset,
                                           STM32_MDMA_MAX_BLOCK_LEN);

                        /* Set source best burst size */
                        max_width = stm32_mdma_get_max_width(src, len, tlen);
                        src_bus_width = stm32_mdma_get_width(chan, max_width);

                        max_burst = tlen / max_width;
                        best_burst = stm32_mdma_get_best_burst(len, tlen,
                                                               max_burst,
                                                               max_width);
                        mdma_burst = ilog2(best_burst);

                        ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
                                STM32_MDMA_CTCR_SSIZE(src_bus_width) |
                                STM32_MDMA_CTCR_SINCOS(src_bus_width);

                        /* Set destination best burst size */
                        max_width = stm32_mdma_get_max_width(dest, len, tlen);
                        dst_bus_width = stm32_mdma_get_width(chan, max_width);

                        max_burst = tlen / max_width;
                        best_burst = stm32_mdma_get_best_burst(len, tlen,
                                                               max_burst,
                                                               max_width);
                        mdma_burst = ilog2(best_burst);

                        ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
                                STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
                                STM32_MDMA_CTCR_DINCOS(dst_bus_width);

                        if (dst_bus_width != src_bus_width)
                                ctcr |= STM32_MDMA_CTCR_PKE;

                        /* Prepare hardware descriptor */
                        stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
                                                src + offset, dest + offset,
                                                xfer_count, ctcr, ctbr,
                                                i == count - 1, i == 0, false);
                }
        }

        desc->ccr = ccr;

        desc->cyclic = false;

        return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

        dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
        dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
        dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
        dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
        dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
        dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
        dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
        dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
        dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
        dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
                stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        struct virt_dma_desc *vdesc;
        struct stm32_mdma_hwdesc *hwdesc;
        u32 id = chan->id;
        u32 status, reg;

        vdesc = vchan_next_desc(&chan->vchan);
        if (!vdesc) {
                chan->desc = NULL;
                return;
        }

        chan->desc = to_stm32_mdma_desc(vdesc);
        hwdesc = chan->desc->node[0].hwdesc;
        chan->curr_hwdesc = 0;

        stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
        stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
        stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
        stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
        stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
        stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
        stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
        stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
        stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
        stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

        /* Clear interrupt status if it is there */
        status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
        if (status)
                stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

        stm32_mdma_dump_reg(chan);

        /* Start DMA */
        stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

        /* Set SW request in case of MEM2MEM transfer */
        if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
                reg = STM32_MDMA_CCR(id);
                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
        }

        chan->busy = true;

        dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        unsigned long flags;

        spin_lock_irqsave(&chan->vchan.lock, flags);

        if (!vchan_issue_pending(&chan->vchan))
                goto end;

        dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

        if (!chan->desc && !chan->busy)
                stm32_mdma_start_transfer(chan);

end:
        spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&chan->vchan.lock, flags);
        ret = stm32_mdma_disable_chan(chan);
        spin_unlock_irqrestore(&chan->vchan.lock, flags);

        if (!ret)
                dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

        return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
        struct stm32_mdma_hwdesc *hwdesc;
        unsigned long flags;
        u32 status, reg;

        hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

        spin_lock_irqsave(&chan->vchan.lock, flags);

        /* Re-configure control register */
        stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

        /* Clear interrupt status if it is there */
        status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
        if (status)
                stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

        stm32_mdma_dump_reg(chan);

        /* Re-start DMA */
        reg = STM32_MDMA_CCR(chan->id);
        stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

        /* Set SW request in case of MEM2MEM transfer */
        if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

        spin_unlock_irqrestore(&chan->vchan.lock, flags);

        dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

        return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&chan->vchan.lock, flags);
        if (chan->busy) {
                stm32_mdma_stop(chan);
                chan->desc = NULL;
        }
        vchan_get_all_descriptors(&chan->vchan, &head);
        spin_unlock_irqrestore(&chan->vchan.lock, flags);

        vchan_dma_desc_free_list(&chan->vchan, &head);

        return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

        vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
                                   struct dma_slave_config *config)
{
        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

        memcpy(&chan->dma_config, config, sizeof(*config));

        return 0;
}

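/*
 * Residue is the sum of the blocks not yet started plus whatever is left in
 * CBNDTR for the current one, rounded up to a whole memory burst: with a
 * burst of 8 beats of 4 bytes, a raw residue of 100 bytes is reported as
 * 128, so bytes still in flight are not counted as transferred.
 */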
1274static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
1275                                      struct stm32_mdma_desc *desc,
1276                                      u32 curr_hwdesc)
1277{
1278        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1279        struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
1280        u32 cbndtr, residue, modulo, burst_size;
1281        int i;
1282
1283        residue = 0;
1284        for (i = curr_hwdesc + 1; i < desc->count; i++) {
1285                hwdesc = desc->node[i].hwdesc;
1286                residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
1287        }
1288        cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
1289        residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;
1290
1291        if (!chan->mem_burst)
1292                return residue;
1293
1294        burst_size = chan->mem_burst * chan->mem_width;
1295        modulo = residue % burst_size;
1296        if (modulo)
1297                residue = residue - modulo + burst_size;
1298
1299        return residue;
1300}
1301
1302static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
1303                                            dma_cookie_t cookie,
1304                                            struct dma_tx_state *state)
1305{
1306        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1307        struct virt_dma_desc *vdesc;
1308        enum dma_status status;
1309        unsigned long flags;
1310        u32 residue = 0;
1311
1312        status = dma_cookie_status(c, cookie, state);
1313        if ((status == DMA_COMPLETE) || (!state))
1314                return status;
1315
1316        spin_lock_irqsave(&chan->vchan.lock, flags);
1317
1318        vdesc = vchan_find_desc(&chan->vchan, cookie);
1319        if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
1320                residue = stm32_mdma_desc_residue(chan, chan->desc,
1321                                                  chan->curr_hwdesc);
1322        else if (vdesc)
1323                residue = stm32_mdma_desc_residue(chan,
1324                                                  to_stm32_mdma_desc(vdesc), 0);
1325        dma_set_residue(state, residue);
1326
1327        spin_unlock_irqrestore(&chan->vchan.lock, flags);
1328
1329        return status;
1330}
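
/*
 * Illustrative status poll from a client, not part of this driver: the
 * residue computed above comes back through the dma_tx_state that the
 * caller passes in.
 *
 *	struct dma_tx_state state;
 *	enum dma_status status;
 *
 *	status = dmaengine_tx_status(chan, cookie, &state);
 *	if (status == DMA_IN_PROGRESS)
 *		pr_info("%u bytes still to transfer\n", state.residue);
 */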
1331
1332static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
1333{
1334        list_del(&chan->desc->vdesc.node);
1335        vchan_cookie_complete(&chan->desc->vdesc);
1336        chan->desc = NULL;
1337        chan->busy = false;
1338
        /* Start the next transfer if this channel has a pending descriptor */
1340        stm32_mdma_start_transfer(chan);
1341}
1342
1343static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1344{
1345        struct stm32_mdma_device *dmadev = devid;
        struct stm32_mdma_chan *chan;
1347        u32 reg, id, ien, status, flag;
1348
1349        /* Find out which channel generates the interrupt */
1350        status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1351        if (status) {
1352                id = __ffs(status);
1353        } else {
1354                status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
1355                if (!status) {
                        dev_dbg(mdma2dev(dmadev), "spurious interrupt\n");
1357                        return IRQ_NONE;
1358                }
1359                id = __ffs(status);
                /*
                 * GISR0 reports status for channels 0 to 31;
                 * GISR1 reports status for channels 32 to 62.
                 */
1364                id += 32;
1365        }
1366
        if (id >= dmadev->nr_channels) {
                dev_dbg(mdma2dev(dmadev), "MDMA channel not initialized\n");
                goto exit;
        }

        chan = &dmadev->chan[id];
1372
1373        /* Handle interrupt for the channel */
1374        spin_lock(&chan->vchan.lock);
1375        status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1376        ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
1377        ien &= STM32_MDMA_CCR_IRQ_MASK;
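        /*
         * Each CISR status flag (TEIF at bit 0 up to TCIF at bit 4) has its
         * enable bit one position higher in CCR, hence the shift by one so
         * the enables line up with the status flags.
         */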
1378        ien >>= 1;
1379
1380        if (!(status & ien)) {
1381                spin_unlock(&chan->vchan.lock);
1382                dev_dbg(chan2dev(chan),
                        "spurious interrupt (status=0x%04x, ien=0x%04x)\n",
1384                        status, ien);
1385                return IRQ_NONE;
1386        }
1387
1388        flag = __ffs(status & ien);
1389        reg = STM32_MDMA_CIFCR(chan->id);
1390
1391        switch (1 << flag) {
1392        case STM32_MDMA_CISR_TEIF:
1393                id = chan->id;
1394                status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
1395                dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
1396                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1397                break;
1398
1399        case STM32_MDMA_CISR_CTCIF:
1400                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1401                stm32_mdma_xfer_end(chan);
1402                break;
1403
1404        case STM32_MDMA_CISR_BRTIF:
1405                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1406                break;
1407
1408        case STM32_MDMA_CISR_BTIF:
1409                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
1410                chan->curr_hwdesc++;
1411                if (chan->desc && chan->desc->cyclic) {
1412                        if (chan->curr_hwdesc == chan->desc->count)
1413                                chan->curr_hwdesc = 0;
1414                        vchan_cyclic_callback(&chan->desc->vdesc);
1415                }
1416                break;
1417
1418        case STM32_MDMA_CISR_TCIF:
1419                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1420                break;
1421
1422        default:
                dev_err(chan2dev(chan), "interrupt %d unhandled (status=0x%04x)\n",
1424                        1 << flag, status);
1425        }
1426
1427        spin_unlock(&chan->vchan.lock);
1428
1429exit:
1430        return IRQ_HANDLED;
1431}
1432
1433static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1434{
1435        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1436        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1437        int ret;
1438
        chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
                                           c->device->dev,
                                           sizeof(struct stm32_mdma_hwdesc),
                                           __alignof__(struct stm32_mdma_hwdesc),
                                           0);
1444        if (!chan->desc_pool) {
1445                dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1446                return -ENOMEM;
1447        }
1448
        ret = pm_runtime_get_sync(dmadev->ddev.dev);
        if (ret < 0) {
                /* get_sync() raises the usage count even on failure */
                pm_runtime_put_noidle(dmadev->ddev.dev);
                return ret;
        }
1452
1453        ret = stm32_mdma_disable_chan(chan);
1454        if (ret < 0)
1455                pm_runtime_put(dmadev->ddev.dev);
1456
1457        return ret;
1458}
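
/*
 * The pool created above hands out one hardware descriptor at a time. A
 * minimal sketch of such an allocation, assuming a hypothetical caller that
 * holds the channel:
 *
 *	struct stm32_mdma_hwdesc *hwdesc;
 *	dma_addr_t hwdesc_phys;
 *
 *	hwdesc = dma_pool_alloc(chan->desc_pool, GFP_NOWAIT, &hwdesc_phys);
 *	if (!hwdesc)
 *		return -ENOMEM;
 *	// ... fill hwdesc, hand hwdesc_phys to the hardware ...
 *	dma_pool_free(chan->desc_pool, hwdesc, hwdesc_phys);
 */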
1459
1460static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1461{
1462        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1463        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1464        unsigned long flags;
1465
1466        dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1467
1468        if (chan->busy) {
1469                spin_lock_irqsave(&chan->vchan.lock, flags);
1470                stm32_mdma_stop(chan);
1471                chan->desc = NULL;
1472                spin_unlock_irqrestore(&chan->vchan.lock, flags);
1473        }
1474
1475        pm_runtime_put(dmadev->ddev.dev);
1476        vchan_free_chan_resources(to_virt_chan(c));
1477        dmam_pool_destroy(chan->desc_pool);
1478        chan->desc_pool = NULL;
1479}
1480
1481static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1482                                            struct of_dma *ofdma)
1483{
1484        struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1485        struct stm32_mdma_chan *chan;
1486        struct dma_chan *c;
1487        struct stm32_mdma_chan_config config;
1488
1489        if (dma_spec->args_count < 5) {
1490                dev_err(mdma2dev(dmadev), "Bad number of args\n");
1491                return NULL;
1492        }
1493
1494        config.request = dma_spec->args[0];
1495        config.priority_level = dma_spec->args[1];
1496        config.transfer_config = dma_spec->args[2];
1497        config.mask_addr = dma_spec->args[3];
1498        config.mask_data = dma_spec->args[4];
1499
1500        if (config.request >= dmadev->nr_requests) {
1501                dev_err(mdma2dev(dmadev), "Bad request line\n");
1502                return NULL;
1503        }
1504
1505        if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1506                dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1507                return NULL;
1508        }
1509
1510        c = dma_get_any_slave_channel(&dmadev->ddev);
1511        if (!c) {
1512                dev_err(mdma2dev(dmadev), "No more channels available\n");
1513                return NULL;
1514        }
1515
1516        chan = to_stm32_mdma_chan(c);
1517        chan->chan_config = config;
1518
1519        return c;
1520}
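
/*
 * Illustrative device-tree consumer with hypothetical values: the five
 * cells parsed above map, in order, to the request line, the priority
 * level, the transfer configuration, the mask address and the mask data.
 *
 *	some_device {
 *		dmas = <&mdma1 26 0x0 0x40002 0x0 0x0>;
 *		dma-names = "rx";
 *	};
 */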
1521
1522static const struct of_device_id stm32_mdma_of_match[] = {
1523        { .compatible = "st,stm32h7-mdma", },
1524        { /* sentinel */ },
1525};
1526MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1527
1528static int stm32_mdma_probe(struct platform_device *pdev)
1529{
1530        struct stm32_mdma_chan *chan;
1531        struct stm32_mdma_device *dmadev;
1532        struct dma_device *dd;
1533        struct device_node *of_node;
1534        struct resource *res;
1535        u32 nr_channels, nr_requests;
1536        int i, count, ret;
1537
1538        of_node = pdev->dev.of_node;
1539        if (!of_node)
1540                return -ENODEV;
1541
1542        ret = device_property_read_u32(&pdev->dev, "dma-channels",
1543                                       &nr_channels);
1544        if (ret) {
1545                nr_channels = STM32_MDMA_MAX_CHANNELS;
                dev_warn(&pdev->dev, "MDMA defaulting to %u channels\n",
1547                         nr_channels);
1548        }
1549
1550        ret = device_property_read_u32(&pdev->dev, "dma-requests",
1551                                       &nr_requests);
1552        if (ret) {
1553                nr_requests = STM32_MDMA_MAX_REQUESTS;
                dev_warn(&pdev->dev, "MDMA defaulting to %u request lines\n",
1555                         nr_requests);
1556        }
1557
1558        count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1559        if (count < 0)
1560                count = 0;
1561
1562        dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev) + sizeof(u32) * count,
1563                              GFP_KERNEL);
1564        if (!dmadev)
1565                return -ENOMEM;
1566
1567        dmadev->nr_channels = nr_channels;
1568        dmadev->nr_requests = nr_requests;
1569        device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1570                                       dmadev->ahb_addr_masks,
1571                                       count);
1572        dmadev->nr_ahb_addr_masks = count;
1573
1574        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1575        dmadev->base = devm_ioremap_resource(&pdev->dev, res);
1576        if (IS_ERR(dmadev->base))
1577                return PTR_ERR(dmadev->base);
1578
1579        dmadev->clk = devm_clk_get(&pdev->dev, NULL);
        if (IS_ERR(dmadev->clk)) {
                ret = PTR_ERR(dmadev->clk);
                if (ret != -EPROBE_DEFER)
                        dev_err(&pdev->dev, "Missing controller clock\n");
                return ret;
        }
1586
1587        ret = clk_prepare_enable(dmadev->clk);
1588        if (ret < 0) {
                dev_err(&pdev->dev, "clk_prepare_enable error: %d\n", ret);
1590                return ret;
1591        }
1592
        dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
        if (IS_ERR(dmadev->rst)) {
                ret = PTR_ERR(dmadev->rst);
                if (ret == -EPROBE_DEFER)
                        goto err_clk;
        } else {
                reset_control_assert(dmadev->rst);
                udelay(2);
                reset_control_deassert(dmadev->rst);
        }
1599
1600        dd = &dmadev->ddev;
1601        dma_cap_set(DMA_SLAVE, dd->cap_mask);
1602        dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1603        dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1604        dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1605        dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1606        dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1607        dd->device_tx_status = stm32_mdma_tx_status;
1608        dd->device_issue_pending = stm32_mdma_issue_pending;
1609        dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1610        dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1611        dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1612        dd->device_config = stm32_mdma_slave_config;
1613        dd->device_pause = stm32_mdma_pause;
1614        dd->device_resume = stm32_mdma_resume;
1615        dd->device_terminate_all = stm32_mdma_terminate_all;
1616        dd->device_synchronize = stm32_mdma_synchronize;
1617        dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1618                BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1619                BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1620                BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1621        dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1622                BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1623                BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1624                BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1625        dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1626                BIT(DMA_MEM_TO_MEM);
1627        dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1628        dd->max_burst = STM32_MDMA_MAX_BURST;
1629        dd->dev = &pdev->dev;
1630        INIT_LIST_HEAD(&dd->channels);
1631
1632        for (i = 0; i < dmadev->nr_channels; i++) {
1633                chan = &dmadev->chan[i];
1634                chan->id = i;
1635                chan->vchan.desc_free = stm32_mdma_desc_free;
1636                vchan_init(&chan->vchan, dd);
1637        }
1638
        dmadev->irq = platform_get_irq(pdev, 0);
        if (dmadev->irq < 0) {
                ret = dmadev->irq;
                goto err_clk;
        }
1642
1643        ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1644                               0, dev_name(&pdev->dev), dmadev);
1645        if (ret) {
1646                dev_err(&pdev->dev, "failed to request IRQ\n");
                goto err_clk;
1648        }
1649
1650        ret = dmaenginem_async_device_register(dd);
1651        if (ret)
                goto err_clk;
1653
1654        ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1655        if (ret < 0) {
1656                dev_err(&pdev->dev,
1657                        "STM32 MDMA DMA OF registration failed %d\n", ret);
                goto err_clk;
1659        }
1660
1661        platform_set_drvdata(pdev, dmadev);
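        /*
         * The clock is still on from probe, so mark the device active before
         * enabling runtime PM; the get_noresume()/put() pair then drops the
         * usage count so the controller can runtime-suspend once idle.
         */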
1662        pm_runtime_set_active(&pdev->dev);
1663        pm_runtime_enable(&pdev->dev);
1664        pm_runtime_get_noresume(&pdev->dev);
1665        pm_runtime_put(&pdev->dev);
1666
1667        dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1668
1669        return 0;
1670
err_clk:
        clk_disable_unprepare(dmadev->clk);

        return ret;
1673}
1674
1675#ifdef CONFIG_PM
1676static int stm32_mdma_runtime_suspend(struct device *dev)
1677{
1678        struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1679
1680        clk_disable_unprepare(dmadev->clk);
1681
1682        return 0;
1683}
1684
1685static int stm32_mdma_runtime_resume(struct device *dev)
1686{
1687        struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1688        int ret;
1689
1690        ret = clk_prepare_enable(dmadev->clk);
1691        if (ret) {
                dev_err(dev, "failed to prepare and enable clock\n");
1693                return ret;
1694        }
1695
1696        return 0;
1697}
1698#endif
1699
1700static const struct dev_pm_ops stm32_mdma_pm_ops = {
1701        SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
1702                           stm32_mdma_runtime_resume, NULL)
1703};
1704
1705static struct platform_driver stm32_mdma_driver = {
1706        .probe = stm32_mdma_probe,
1707        .driver = {
1708                .name = "stm32-mdma",
1709                .of_match_table = stm32_mdma_of_match,
1710                .pm = &stm32_mdma_pm_ops,
1711        },
1712};
1713
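/*
 * Registered at subsys_initcall rather than via module_platform_driver() so
 * that the MDMA controller comes up early, before the client drivers that
 * depend on its channels probe; that is presumably the intent here.
 */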
1714static int __init stm32_mdma_init(void)
1715{
1716        return platform_driver_register(&stm32_mdma_driver);
1717}
1718
1719subsys_initcall(stm32_mdma_init);
1720
1721MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1722MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1723MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1724MODULE_LICENSE("GPL v2");
1725