linux/drivers/dma/stm32-mdma.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 *
 * Copyright (C) STMicroelectronics SA 2017
 * Author(s): M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *            Pierre-Yves Mordret <pierre-yves.mordret@st.com>
 *
 * Driver for STM32 MDMA controller
 *
 * Inspired by stm32-dma.c and dma-jz4780.c
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/iopoll.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/slab.h>

#include "virt-dma.h"

/* MDMA Generic getter/setter */
#define STM32_MDMA_SHIFT(n)		(ffs(n) - 1)
#define STM32_MDMA_SET(n, mask)		(((n) << STM32_MDMA_SHIFT(mask)) & \
					 (mask))
#define STM32_MDMA_GET(n, mask)		(((n) & (mask)) >> \
					 STM32_MDMA_SHIFT(mask))
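
/*
 * Example: STM32_MDMA_SET(2, GENMASK(7, 6)) shifts the value left by
 * ffs(0xc0) - 1 = 6 and masks the result: (2 << 6) & 0xc0 = 0x80.
 * STM32_MDMA_GET() is the matching field extraction.
 */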

#define STM32_MDMA_GISR0		0x0000 /* MDMA Int Status Reg 1 */
#define STM32_MDMA_GISR1		0x0004 /* MDMA Int Status Reg 2 */

/* MDMA Channel x interrupt/status register */
#define STM32_MDMA_CISR(x)		(0x40 + 0x40 * (x)) /* x = 0..62 */
#define STM32_MDMA_CISR_CRQA		BIT(16)
#define STM32_MDMA_CISR_TCIF		BIT(4)
#define STM32_MDMA_CISR_BTIF		BIT(3)
#define STM32_MDMA_CISR_BRTIF		BIT(2)
#define STM32_MDMA_CISR_CTCIF		BIT(1)
#define STM32_MDMA_CISR_TEIF		BIT(0)

/* MDMA Channel x interrupt flag clear register */
#define STM32_MDMA_CIFCR(x)		(0x44 + 0x40 * (x))
#define STM32_MDMA_CIFCR_CLTCIF		BIT(4)
#define STM32_MDMA_CIFCR_CBTIF		BIT(3)
#define STM32_MDMA_CIFCR_CBRTIF		BIT(2)
#define STM32_MDMA_CIFCR_CCTCIF		BIT(1)
#define STM32_MDMA_CIFCR_CTEIF		BIT(0)
#define STM32_MDMA_CIFCR_CLEAR_ALL	(STM32_MDMA_CIFCR_CLTCIF \
					| STM32_MDMA_CIFCR_CBTIF \
					| STM32_MDMA_CIFCR_CBRTIF \
					| STM32_MDMA_CIFCR_CCTCIF \
					| STM32_MDMA_CIFCR_CTEIF)

/* MDMA Channel x error status register */
#define STM32_MDMA_CESR(x)		(0x48 + 0x40 * (x))
#define STM32_MDMA_CESR_BSE		BIT(11)
#define STM32_MDMA_CESR_ASR		BIT(10)
#define STM32_MDMA_CESR_TEMD		BIT(9)
#define STM32_MDMA_CESR_TELD		BIT(8)
#define STM32_MDMA_CESR_TED		BIT(7)
#define STM32_MDMA_CESR_TEA_MASK	GENMASK(6, 0)

/* MDMA Channel x control register */
#define STM32_MDMA_CCR(x)		(0x4C + 0x40 * (x))
#define STM32_MDMA_CCR_SWRQ		BIT(16)
#define STM32_MDMA_CCR_WEX		BIT(14)
#define STM32_MDMA_CCR_HEX		BIT(13)
#define STM32_MDMA_CCR_BEX		BIT(12)
#define STM32_MDMA_CCR_PL_MASK		GENMASK(7, 6)
#define STM32_MDMA_CCR_PL(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CCR_PL_MASK)
#define STM32_MDMA_CCR_TCIE		BIT(5)
#define STM32_MDMA_CCR_BTIE		BIT(4)
#define STM32_MDMA_CCR_BRTIE		BIT(3)
#define STM32_MDMA_CCR_CTCIE		BIT(2)
#define STM32_MDMA_CCR_TEIE		BIT(1)
#define STM32_MDMA_CCR_EN		BIT(0)
#define STM32_MDMA_CCR_IRQ_MASK		(STM32_MDMA_CCR_TCIE \
					| STM32_MDMA_CCR_BTIE \
					| STM32_MDMA_CCR_BRTIE \
					| STM32_MDMA_CCR_CTCIE \
					| STM32_MDMA_CCR_TEIE)

/* MDMA Channel x transfer configuration register */
#define STM32_MDMA_CTCR(x)		(0x50 + 0x40 * (x))
#define STM32_MDMA_CTCR_BWM		BIT(31)
#define STM32_MDMA_CTCR_SWRM		BIT(30)
#define STM32_MDMA_CTCR_TRGM_MSK	GENMASK(29, 28)
#define STM32_MDMA_CTCR_TRGM(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_TRGM_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TRGM_MSK)
#define STM32_MDMA_CTCR_PAM_MASK	GENMASK(27, 26)
#define STM32_MDMA_CTCR_PAM(n)		STM32_MDMA_SET(n, \
						       STM32_MDMA_CTCR_PAM_MASK)
#define STM32_MDMA_CTCR_PKE		BIT(25)
#define STM32_MDMA_CTCR_TLEN_MSK	GENMASK(24, 18)
#define STM32_MDMA_CTCR_TLEN(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_TLEN_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_TLEN_MSK)
#define STM32_MDMA_CTCR_LEN2_MSK	GENMASK(25, 18)
#define STM32_MDMA_CTCR_LEN2(n)		STM32_MDMA_SET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_LEN2_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CTCR_LEN2_MSK)
#define STM32_MDMA_CTCR_DBURST_MASK	GENMASK(17, 15)
#define STM32_MDMA_CTCR_DBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_DBURST_MASK)
#define STM32_MDMA_CTCR_SBURST_MASK	GENMASK(14, 12)
#define STM32_MDMA_CTCR_SBURST(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CTCR_SBURST_MASK)
#define STM32_MDMA_CTCR_DINCOS_MASK	GENMASK(11, 10)
#define STM32_MDMA_CTCR_DINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_DINCOS_MASK)
#define STM32_MDMA_CTCR_SINCOS_MASK	GENMASK(9, 8)
#define STM32_MDMA_CTCR_SINCOS(n)	STM32_MDMA_SET((n), \
						    STM32_MDMA_CTCR_SINCOS_MASK)
#define STM32_MDMA_CTCR_DSIZE_MASK	GENMASK(7, 6)
#define STM32_MDMA_CTCR_DSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_DSIZE_MASK)
#define STM32_MDMA_CTCR_SSIZE_MASK	GENMASK(5, 4)
#define STM32_MDMA_CTCR_SSIZE(n)	STM32_MDMA_SET(n, \
						     STM32_MDMA_CTCR_SSIZE_MASK)
#define STM32_MDMA_CTCR_DINC_MASK	GENMASK(3, 2)
#define STM32_MDMA_CTCR_DINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_DINC_MASK)
#define STM32_MDMA_CTCR_SINC_MASK	GENMASK(1, 0)
#define STM32_MDMA_CTCR_SINC(n)		STM32_MDMA_SET((n), \
						      STM32_MDMA_CTCR_SINC_MASK)
#define STM32_MDMA_CTCR_CFG_MASK	(STM32_MDMA_CTCR_SINC_MASK \
					| STM32_MDMA_CTCR_DINC_MASK \
					| STM32_MDMA_CTCR_SINCOS_MASK \
					| STM32_MDMA_CTCR_DINCOS_MASK \
					| STM32_MDMA_CTCR_LEN2_MSK \
					| STM32_MDMA_CTCR_TRGM_MSK)

/* MDMA Channel x block number of data register */
#define STM32_MDMA_CBNDTR(x)		(0x54 + 0x40 * (x))
#define STM32_MDMA_CBNDTR_BRC_MK	GENMASK(31, 20)
#define STM32_MDMA_CBNDTR_BRC(n)	STM32_MDMA_SET(n, \
						       STM32_MDMA_CBNDTR_BRC_MK)
#define STM32_MDMA_CBNDTR_BRC_GET(n)	STM32_MDMA_GET((n), \
						       STM32_MDMA_CBNDTR_BRC_MK)

#define STM32_MDMA_CBNDTR_BRDUM		BIT(19)
#define STM32_MDMA_CBNDTR_BRSUM		BIT(18)
#define STM32_MDMA_CBNDTR_BNDT_MASK	GENMASK(16, 0)
#define STM32_MDMA_CBNDTR_BNDT(n)	STM32_MDMA_SET(n, \
						    STM32_MDMA_CBNDTR_BNDT_MASK)

/* MDMA Channel x source address register */
#define STM32_MDMA_CSAR(x)		(0x58 + 0x40 * (x))

/* MDMA Channel x destination address register */
#define STM32_MDMA_CDAR(x)		(0x5C + 0x40 * (x))

/* MDMA Channel x block repeat address update register */
#define STM32_MDMA_CBRUR(x)		(0x60 + 0x40 * (x))
#define STM32_MDMA_CBRUR_DUV_MASK	GENMASK(31, 16)
#define STM32_MDMA_CBRUR_DUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_DUV_MASK)
#define STM32_MDMA_CBRUR_SUV_MASK	GENMASK(15, 0)
#define STM32_MDMA_CBRUR_SUV(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CBRUR_SUV_MASK)

/* MDMA Channel x link address register */
#define STM32_MDMA_CLAR(x)		(0x64 + 0x40 * (x))

/* MDMA Channel x trigger and bus selection register */
#define STM32_MDMA_CTBR(x)		(0x68 + 0x40 * (x))
#define STM32_MDMA_CTBR_DBUS		BIT(17)
#define STM32_MDMA_CTBR_SBUS		BIT(16)
#define STM32_MDMA_CTBR_TSEL_MASK	GENMASK(7, 0)
#define STM32_MDMA_CTBR_TSEL(n)		STM32_MDMA_SET(n, \
						      STM32_MDMA_CTBR_TSEL_MASK)

/* MDMA Channel x mask address register */
#define STM32_MDMA_CMAR(x)		(0x70 + 0x40 * (x))

/* MDMA Channel x mask data register */
#define STM32_MDMA_CMDR(x)		(0x74 + 0x40 * (x))

#define STM32_MDMA_MAX_BUF_LEN		128
#define STM32_MDMA_MAX_BLOCK_LEN	65536
#define STM32_MDMA_MAX_CHANNELS		63
#define STM32_MDMA_MAX_REQUESTS		256
#define STM32_MDMA_MAX_BURST		128
#define STM32_MDMA_VERY_HIGH_PRIORITY	0x11

enum stm32_mdma_trigger_mode {
	STM32_MDMA_BUFFER,
	STM32_MDMA_BLOCK,
	STM32_MDMA_BLOCK_REP,
	STM32_MDMA_LINKED_LIST,
};

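/*
 * The enum values are the 2-bit S/DSIZE register encodings: the access
 * size in bytes is 2^value, which is why stm32_mdma_get_width() returns
 * ffs(width) - 1.
 */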
enum stm32_mdma_width {
	STM32_MDMA_BYTE,
	STM32_MDMA_HALF_WORD,
	STM32_MDMA_WORD,
	STM32_MDMA_DOUBLE_WORD,
};

enum stm32_mdma_inc_mode {
	STM32_MDMA_FIXED = 0,
	STM32_MDMA_INC = 2,
	STM32_MDMA_DEC = 3,
};

struct stm32_mdma_chan_config {
	u32 request;
	u32 priority_level;
	u32 transfer_config;
	u32 mask_addr;
	u32 mask_data;
};

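/*
 * The hardware descriptor mirrors the channel register block from CTCR
 * to CMDR ("dummy" pads the reserved word between CTBR and CMAR), so
 * the controller can reload a complete channel configuration from
 * memory when following a linked list. CLAR holds the address of the
 * next descriptor, and the structure is 64-byte aligned to satisfy the
 * link address alignment the hardware expects.
 */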
struct stm32_mdma_hwdesc {
	u32 ctcr;
	u32 cbndtr;
	u32 csar;
	u32 cdar;
	u32 cbrur;
	u32 clar;
	u32 ctbr;
	u32 dummy;
	u32 cmar;
	u32 cmdr;
} __aligned(64);

struct stm32_mdma_desc_node {
	struct stm32_mdma_hwdesc *hwdesc;
	dma_addr_t hwdesc_phys;
};

struct stm32_mdma_desc {
	struct virt_dma_desc vdesc;
	u32 ccr;
	bool cyclic;
	u32 count;
	struct stm32_mdma_desc_node node[];
};

struct stm32_mdma_chan {
	struct virt_dma_chan vchan;
	struct dma_pool *desc_pool;
	u32 id;
	struct stm32_mdma_desc *desc;
	u32 curr_hwdesc;
	struct dma_slave_config dma_config;
	struct stm32_mdma_chan_config chan_config;
	bool busy;
	u32 mem_burst;
	u32 mem_width;
};

struct stm32_mdma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	int irq;
	u32 nr_channels;
	u32 nr_requests;
	u32 nr_ahb_addr_masks;
	struct stm32_mdma_chan chan[STM32_MDMA_MAX_CHANNELS];
	u32 ahb_addr_masks[];
};

static struct stm32_mdma_device *stm32_mdma_get_dev(
	struct stm32_mdma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_mdma_device,
			    ddev);
}

static struct stm32_mdma_chan *to_stm32_mdma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_mdma_chan, vchan.chan);
}

static struct stm32_mdma_desc *to_stm32_mdma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_mdma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_mdma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static struct device *mdma2dev(struct stm32_mdma_device *mdma_dev)
{
	return mdma_dev->ddev.dev;
}

static u32 stm32_mdma_read(struct stm32_mdma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_mdma_write(struct stm32_mdma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

static void stm32_mdma_set_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) | mask, addr);
}

static void stm32_mdma_clr_bits(struct stm32_mdma_device *dmadev, u32 reg,
				u32 mask)
{
	void __iomem *addr = dmadev->base + reg;

	writel_relaxed(readl_relaxed(addr) & ~mask, addr);
}

static struct stm32_mdma_desc *stm32_mdma_alloc_desc(
		struct stm32_mdma_chan *chan, u32 count)
{
	struct stm32_mdma_desc *desc;
	int i;

	desc = kzalloc(offsetof(typeof(*desc), node[count]), GFP_NOWAIT);
	if (!desc)
		return NULL;

	for (i = 0; i < count; i++) {
		desc->node[i].hwdesc =
			dma_pool_alloc(chan->desc_pool, GFP_NOWAIT,
				       &desc->node[i].hwdesc_phys);
		if (!desc->node[i].hwdesc)
			goto err;
	}

	desc->count = count;

	return desc;

err:
	dev_err(chan2dev(chan), "Failed to allocate descriptor\n");
	while (--i >= 0)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static void stm32_mdma_desc_free(struct virt_dma_desc *vdesc)
{
	struct stm32_mdma_desc *desc = to_stm32_mdma_desc(vdesc);
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(vdesc->tx.chan);
	int i;

	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
}

static int stm32_mdma_get_width(struct stm32_mdma_chan *chan,
				enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
	case DMA_SLAVE_BUSWIDTH_8_BYTES:
		return ffs(width) - 1;
	default:
		dev_err(chan2dev(chan), "Dma bus width %i not supported\n",
			width);
		return -EINVAL;
	}
}

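/*
 * Pick the widest bus width both the address and the length are aligned
 * to. For instance, addr = 0x24000004 with buf_len = 12 settles on
 * 4-byte accesses: both values are multiples of 4 but not of 8.
 */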
static enum dma_slave_buswidth stm32_mdma_get_max_width(dma_addr_t addr,
							u32 buf_len, u32 tlen)
{
	enum dma_slave_buswidth max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;

	for (max_width = DMA_SLAVE_BUSWIDTH_8_BYTES;
	     max_width > DMA_SLAVE_BUSWIDTH_1_BYTE;
	     max_width >>= 1) {
		/*
		 * Address and buffer length both have to be aligned on
		 * bus width
		 */
		if ((((buf_len | addr) & (max_width - 1)) == 0) &&
		    tlen >= max_width)
			break;
	}

	return max_width;
}

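/*
 * Compute the largest burst, in beats, that divides the transfer
 * evenly. Example: buf_len = 96, tlen = 128 and a 4-byte width give
 * __ffs(128 | 96) = 5, i.e. 32-byte aligned chunks; with max_burst = 8
 * this yields min(32, 8 * 4) / 4 = 8 beats per burst.
 */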
static u32 stm32_mdma_get_best_burst(u32 buf_len, u32 tlen, u32 max_burst,
				     enum dma_slave_buswidth width)
{
	u32 best_burst;

	best_burst = min((u32)1 << __ffs(tlen | buf_len),
			 max_burst * width) / width;

	return (best_burst > 0) ? best_burst : 1;
}

static int stm32_mdma_disable_chan(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 ccr, cisr, id, reg;
	int ret;

	id = chan->id;
	reg = STM32_MDMA_CCR(id);

	/* Disable interrupts */
	stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_IRQ_MASK);

	ccr = stm32_mdma_read(dmadev, reg);
	if (ccr & STM32_MDMA_CCR_EN) {
		stm32_mdma_clr_bits(dmadev, reg, STM32_MDMA_CCR_EN);

		/* Ensure that any ongoing transfer has been completed */
		ret = readl_relaxed_poll_timeout_atomic(
				dmadev->base + STM32_MDMA_CISR(id), cisr,
				(cisr & STM32_MDMA_CISR_CTCIF), 10, 1000);
		if (ret) {
			dev_err(chan2dev(chan), "%s: timeout!\n", __func__);
			return -EBUSY;
		}
	}

	return 0;
}

static void stm32_mdma_stop(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	u32 status;
	int ret;

	/* Disable DMA */
	ret = stm32_mdma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);
	}

	chan->busy = false;
}

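/*
 * Route the access through the AHB port when the address matches one of
 * the masks in ahb_addr_masks (filled at probe time, presumably from
 * the "st,ahb-addr-masks" DT property); otherwise the default AXI port
 * is used.
 */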
static void stm32_mdma_set_bus(struct stm32_mdma_device *dmadev, u32 *ctbr,
			       u32 ctbr_mask, u32 src_addr)
{
	u32 mask;
	int i;

	/* Check if memory device is on AHB or AXI */
	*ctbr &= ~ctbr_mask;
	mask = src_addr & 0xF0000000;
	for (i = 0; i < dmadev->nr_ahb_addr_masks; i++) {
		if (mask == dmadev->ahb_addr_masks[i]) {
			*ctbr |= ctbr_mask;
			break;
		}
	}
}

static int stm32_mdma_set_xfer_param(struct stm32_mdma_chan *chan,
				     enum dma_transfer_direction direction,
				     u32 *mdma_ccr, u32 *mdma_ctcr,
				     u32 *mdma_ctbr, dma_addr_t addr,
				     u32 buf_len)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_chan_config *chan_config = &chan->chan_config;
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	phys_addr_t src_addr, dst_addr;
	int src_bus_width, dst_bus_width;
	u32 src_maxburst, dst_maxburst, src_best_burst, dst_best_burst;
	u32 ccr, ctcr, ctbr, tlen;

	src_addr_width = chan->dma_config.src_addr_width;
	dst_addr_width = chan->dma_config.dst_addr_width;
	src_maxburst = chan->dma_config.src_maxburst;
	dst_maxburst = chan->dma_config.dst_maxburst;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));

	/* Enable HW request mode */
	ctcr &= ~STM32_MDMA_CTCR_SWRM;

	/* Set DINC, SINC, DINCOS, SINCOS, TRGM and TLEN retrieved from DT */
	ctcr &= ~STM32_MDMA_CTCR_CFG_MASK;
	ctcr |= chan_config->transfer_config & STM32_MDMA_CTCR_CFG_MASK;

	/*
	 * The buffer transfer length (TLEN) has to be programmed in the
	 * CTCR register as the number of bytes minus 1.
	 */
	tlen = STM32_MDMA_CTCR_LEN2_GET(ctcr);
	ctcr &= ~STM32_MDMA_CTCR_LEN2_MSK;
	ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

	/* Disable Pack Enable */
	ctcr &= ~STM32_MDMA_CTCR_PKE;

	/* Check burst size constraints */
	if (src_maxburst * src_addr_width > STM32_MDMA_MAX_BURST ||
	    dst_maxburst * dst_addr_width > STM32_MDMA_MAX_BURST) {
		dev_err(chan2dev(chan),
			"burst size * bus width higher than %d bytes\n",
			STM32_MDMA_MAX_BURST);
		return -EINVAL;
	}

	if ((!is_power_of_2(src_maxburst) && src_maxburst > 0) ||
	    (!is_power_of_2(dst_maxburst) && dst_maxburst > 0)) {
		dev_err(chan2dev(chan), "burst size must be a power of 2\n");
		return -EINVAL;
	}

	/*
	 * Configure channel control:
	 * - Clear SW request as in this case this is a HW one
	 * - Clear WEX, HEX and BEX bits
	 * - Set priority level
	 */
	ccr &= ~(STM32_MDMA_CCR_SWRQ | STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK);
	ccr |= STM32_MDMA_CCR_PL(chan_config->priority_level);

	/* Configure Trigger selection */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;
	ctbr |= STM32_MDMA_CTBR_TSEL(chan_config->request);

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_addr = chan->dma_config.dst_addr;

		/* Set device data size */
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_DSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width);

		/* Set device burst value */
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		chan->mem_burst = dst_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Set memory data size */
		src_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = src_addr_width;
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_SSIZE_MASK |
			  STM32_MDMA_CTCR_SINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set memory burst value */
		src_maxburst = STM32_MDMA_MAX_BUF_LEN / src_addr_width;
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		chan->mem_burst = src_best_burst;
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set destination address */
		stm32_mdma_write(dmadev, STM32_MDMA_CDAR(chan->id), dst_addr);
		break;

	case DMA_DEV_TO_MEM:
		src_addr = chan->dma_config.src_addr;

		/* Set device data size */
		src_bus_width = stm32_mdma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;
		ctcr &= ~STM32_MDMA_CTCR_SSIZE_MASK;
		ctcr |= STM32_MDMA_CTCR_SSIZE(src_bus_width);

		/* Set device burst value */
		src_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   src_maxburst,
							   src_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_SBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_SBURST((ilog2(src_best_burst)));

		/* Set memory data size */
		dst_addr_width = stm32_mdma_get_max_width(addr, buf_len, tlen);
		chan->mem_width = dst_addr_width;
		dst_bus_width = stm32_mdma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;
		ctcr &= ~(STM32_MDMA_CTCR_DSIZE_MASK |
			STM32_MDMA_CTCR_DINCOS_MASK);
		ctcr |= STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		/* Set memory burst value */
		dst_maxburst = STM32_MDMA_MAX_BUF_LEN / dst_addr_width;
		dst_best_burst = stm32_mdma_get_best_burst(buf_len, tlen,
							   dst_maxburst,
							   dst_addr_width);
		ctcr &= ~STM32_MDMA_CTCR_DBURST_MASK;
		ctcr |= STM32_MDMA_CTCR_DBURST((ilog2(dst_best_burst)));

		/* Select bus */
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Set source address */
		stm32_mdma_write(dmadev, STM32_MDMA_CSAR(chan->id), src_addr);
		break;

	default:
		dev_err(chan2dev(chan), "Dma direction is not supported\n");
		return -EINVAL;
	}

	*mdma_ccr = ccr;
	*mdma_ctcr = ctcr;
	*mdma_ctbr = ctbr;

	return 0;
}

static void stm32_mdma_dump_hwdesc(struct stm32_mdma_chan *chan,
				   struct stm32_mdma_desc_node *node)
{
	dev_dbg(chan2dev(chan), "hwdesc:  %pad\n", &node->hwdesc_phys);
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n", node->hwdesc->ctcr);
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n", node->hwdesc->cbndtr);
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n", node->hwdesc->csar);
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n", node->hwdesc->cdar);
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n", node->hwdesc->cbrur);
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n", node->hwdesc->clar);
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n", node->hwdesc->ctbr);
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n", node->hwdesc->cmar);
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n\n", node->hwdesc->cmdr);
}

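/*
 * Fill one hardware descriptor of the chain. Each node's CLAR points to
 * the physical address of the next node; the last node either links
 * back to the first one (cyclic transfers) or holds 0 to end the list.
 */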
static void stm32_mdma_setup_hwdesc(struct stm32_mdma_chan *chan,
				    struct stm32_mdma_desc *desc,
				    enum dma_transfer_direction dir, u32 count,
				    dma_addr_t src_addr, dma_addr_t dst_addr,
				    u32 len, u32 ctcr, u32 ctbr, bool is_last,
				    bool is_first, bool is_cyclic)
{
	struct stm32_mdma_chan_config *config = &chan->chan_config;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 next = count + 1;

	hwdesc = desc->node[count].hwdesc;
	hwdesc->ctcr = ctcr;
	hwdesc->cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK |
			STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM |
			STM32_MDMA_CBNDTR_BNDT_MASK);
	hwdesc->cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
	hwdesc->csar = src_addr;
	hwdesc->cdar = dst_addr;
	hwdesc->cbrur = 0;
	hwdesc->ctbr = ctbr;
	hwdesc->cmar = config->mask_addr;
	hwdesc->cmdr = config->mask_data;

	if (is_last) {
		if (is_cyclic)
			hwdesc->clar = desc->node[0].hwdesc_phys;
		else
			hwdesc->clar = 0;
	} else {
		hwdesc->clar = desc->node[next].hwdesc_phys;
	}

	stm32_mdma_dump_hwdesc(chan, &desc->node[count]);
}

static int stm32_mdma_setup_xfer(struct stm32_mdma_chan *chan,
				 struct stm32_mdma_desc *desc,
				 struct scatterlist *sgl, u32 sg_len,
				 enum dma_transfer_direction direction)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct scatterlist *sg;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr;
	int i, ret = 0;

	for_each_sg(sgl, sg, sg_len, i) {
		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
			dev_err(chan2dev(chan), "Invalid block len\n");
			return -EINVAL;
		}

		if (direction == DMA_MEM_TO_DEV) {
			src_addr = sg_dma_address(sg);
			dst_addr = dma_config->dst_addr;
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, src_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
					   src_addr);
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = sg_dma_address(sg);
			ret = stm32_mdma_set_xfer_param(chan, direction, &ccr,
							&ctcr, &ctbr, dst_addr,
							sg_dma_len(sg));
			stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
					   dst_addr);
		}

		if (ret < 0)
			return ret;

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, sg_dma_len(sg), ctcr, ctbr,
					i == sg_len - 1, i == 0, false);
	}

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE;
	if (sg_len > 1)
		ccr |= STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	return 0;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_slave_sg(struct dma_chan *c, struct scatterlist *sgl,
			 u32 sg_len, enum dma_transfer_direction direction,
			 unsigned long flags, void *context)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_desc *desc;
	int i, ret;

	/*
	 * Once the channel is set up for a cyclic transfer, it cannot be
	 * assigned to another request. The DMA channel needs to be aborted
	 * or terminated before a new request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	desc = stm32_mdma_alloc_desc(chan, sg_len);
	if (!desc)
		return NULL;

	ret = stm32_mdma_setup_xfer(chan, desc, sgl, sg_len, direction);
	if (ret < 0)
		goto xfer_setup_err;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

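/*
 * For cyclic transfers each period gets its own hardware descriptor and
 * the list is closed on itself, so the hardware keeps cycling through
 * the buffer and the block-transfer-complete interrupt fires once per
 * period without CPU intervention.
 */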
static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_cyclic(struct dma_chan *c, dma_addr_t buf_addr,
			   size_t buf_len, size_t period_len,
			   enum dma_transfer_direction direction,
			   unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct dma_slave_config *dma_config = &chan->dma_config;
	struct stm32_mdma_desc *desc;
	dma_addr_t src_addr, dst_addr;
	u32 ccr, ctcr, ctbr, count;
	int i, ret;

	/*
	 * Once the channel is set up for a cyclic transfer, it cannot be
	 * assigned to another request. The DMA channel needs to be aborted
	 * or terminated before a new request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	if (!buf_len || !period_len || period_len > STM32_MDMA_MAX_BLOCK_LEN) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	count = buf_len / period_len;

	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	/* Select bus */
	if (direction == DMA_MEM_TO_DEV) {
		src_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, src_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS,
				   src_addr);
	} else {
		dst_addr = buf_addr;
		ret = stm32_mdma_set_xfer_param(chan, direction, &ccr, &ctcr,
						&ctbr, dst_addr, period_len);
		stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS,
				   dst_addr);
	}

	if (ret < 0)
		goto xfer_setup_err;

	/* Enable interrupts */
	ccr &= ~STM32_MDMA_CCR_IRQ_MASK;
	ccr |= STM32_MDMA_CCR_TEIE | STM32_MDMA_CCR_CTCIE | STM32_MDMA_CCR_BTIE;
	desc->ccr = ccr;

	/* Configure hwdesc list */
	for (i = 0; i < count; i++) {
		if (direction == DMA_MEM_TO_DEV) {
			src_addr = buf_addr + i * period_len;
			dst_addr = dma_config->dst_addr;
		} else {
			src_addr = dma_config->src_addr;
			dst_addr = buf_addr + i * period_len;
		}

		stm32_mdma_setup_hwdesc(chan, desc, direction, i, src_addr,
					dst_addr, period_len, ctcr, ctbr,
					i == count - 1, i == 0, true);
	}

	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

xfer_setup_err:
	for (i = 0; i < desc->count; i++)
		dma_pool_free(chan->desc_pool, desc->node[i].hwdesc,
			      desc->node[i].hwdesc_phys);
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *
stm32_mdma_prep_dma_memcpy(struct dma_chan *c, dma_addr_t dest, dma_addr_t src,
			   size_t len, unsigned long flags)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	enum dma_slave_buswidth max_width;
	struct stm32_mdma_desc *desc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 ccr, ctcr, ctbr, cbndtr, count, max_burst, mdma_burst;
	u32 best_burst, tlen;
	size_t xfer_count, offset;
	int src_bus_width, dst_bus_width;
	int i;

	/*
	 * Once the channel is set up for a cyclic transfer, it cannot be
	 * assigned to another request. The DMA channel needs to be aborted
	 * or terminated before a new request can be issued.
	 */
	if (chan->desc && chan->desc->cyclic) {
		dev_err(chan2dev(chan),
			"Request not allowed when dma in cyclic mode\n");
		return NULL;
	}

	count = DIV_ROUND_UP(len, STM32_MDMA_MAX_BLOCK_LEN);
	desc = stm32_mdma_alloc_desc(chan, count);
	if (!desc)
		return NULL;

	ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
	ctcr = stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id));
	ctbr = stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id));
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));

	/* Enable sw req, some interrupts and clear other bits */
	ccr &= ~(STM32_MDMA_CCR_WEX | STM32_MDMA_CCR_HEX |
		 STM32_MDMA_CCR_BEX | STM32_MDMA_CCR_PL_MASK |
		 STM32_MDMA_CCR_IRQ_MASK);
	ccr |= STM32_MDMA_CCR_TEIE;

	/* Enable SW request mode, dest/src inc and clear other bits */
	ctcr &= ~(STM32_MDMA_CTCR_BWM | STM32_MDMA_CTCR_TRGM_MSK |
		  STM32_MDMA_CTCR_PAM_MASK | STM32_MDMA_CTCR_PKE |
		  STM32_MDMA_CTCR_TLEN_MSK | STM32_MDMA_CTCR_DBURST_MASK |
		  STM32_MDMA_CTCR_SBURST_MASK | STM32_MDMA_CTCR_DINCOS_MASK |
		  STM32_MDMA_CTCR_SINCOS_MASK | STM32_MDMA_CTCR_DSIZE_MASK |
		  STM32_MDMA_CTCR_SSIZE_MASK | STM32_MDMA_CTCR_DINC_MASK |
		  STM32_MDMA_CTCR_SINC_MASK);
	ctcr |= STM32_MDMA_CTCR_SWRM | STM32_MDMA_CTCR_SINC(STM32_MDMA_INC) |
		STM32_MDMA_CTCR_DINC(STM32_MDMA_INC);

	/* Reset HW request */
	ctbr &= ~STM32_MDMA_CTBR_TSEL_MASK;

	/* Select bus */
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_SBUS, src);
	stm32_mdma_set_bus(dmadev, &ctbr, STM32_MDMA_CTBR_DBUS, dest);

	/* Clear CBNDTR registers */
	cbndtr &= ~(STM32_MDMA_CBNDTR_BRC_MK | STM32_MDMA_CBNDTR_BRDUM |
			STM32_MDMA_CBNDTR_BRSUM | STM32_MDMA_CBNDTR_BNDT_MASK);

	if (len <= STM32_MDMA_MAX_BLOCK_LEN) {
		cbndtr |= STM32_MDMA_CBNDTR_BNDT(len);
		if (len <= STM32_MDMA_MAX_BUF_LEN) {
			/* Setup a buffer transfer */
			ccr |= STM32_MDMA_CCR_TCIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BUFFER);
		} else {
			/* Setup a block transfer */
			ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
			ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_BLOCK);
		}

		tlen = STM32_MDMA_MAX_BUF_LEN;
		ctcr |= STM32_MDMA_CTCR_TLEN((tlen - 1));

		/* Set source best burst size */
		max_width = stm32_mdma_get_max_width(src, len, tlen);
		src_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
			STM32_MDMA_CTCR_SSIZE(src_bus_width) |
			STM32_MDMA_CTCR_SINCOS(src_bus_width);

		/* Set destination best burst size */
		max_width = stm32_mdma_get_max_width(dest, len, tlen);
		dst_bus_width = stm32_mdma_get_width(chan, max_width);

		max_burst = tlen / max_width;
		best_burst = stm32_mdma_get_best_burst(len, tlen, max_burst,
						       max_width);
		mdma_burst = ilog2(best_burst);

		ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
			STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
			STM32_MDMA_CTCR_DINCOS(dst_bus_width);

		if (dst_bus_width != src_bus_width)
			ctcr |= STM32_MDMA_CTCR_PKE;

		/* Prepare hardware descriptor */
		hwdesc = desc->node[0].hwdesc;
		hwdesc->ctcr = ctcr;
		hwdesc->cbndtr = cbndtr;
		hwdesc->csar = src;
		hwdesc->cdar = dest;
		hwdesc->cbrur = 0;
		hwdesc->clar = 0;
		hwdesc->ctbr = ctbr;
		hwdesc->cmar = 0;
		hwdesc->cmdr = 0;

		stm32_mdma_dump_hwdesc(chan, &desc->node[0]);
	} else {
		/* Setup a LLI transfer */
		ctcr |= STM32_MDMA_CTCR_TRGM(STM32_MDMA_LINKED_LIST) |
			STM32_MDMA_CTCR_TLEN((STM32_MDMA_MAX_BUF_LEN - 1));
		ccr |= STM32_MDMA_CCR_BTIE | STM32_MDMA_CCR_CTCIE;
		tlen = STM32_MDMA_MAX_BUF_LEN;

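		/*
		 * Transfers larger than one block are split into a chain
		 * of hardware descriptors, each covering up to
		 * STM32_MDMA_MAX_BLOCK_LEN bytes.
		 */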
		for (i = 0, offset = 0; offset < len;
		     i++, offset += xfer_count) {
			xfer_count = min_t(size_t, len - offset,
					   STM32_MDMA_MAX_BLOCK_LEN);

			/* Set source best burst size */
			max_width = stm32_mdma_get_max_width(src, len, tlen);
			src_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_SBURST(mdma_burst) |
				STM32_MDMA_CTCR_SSIZE(src_bus_width) |
				STM32_MDMA_CTCR_SINCOS(src_bus_width);

			/* Set destination best burst size */
			max_width = stm32_mdma_get_max_width(dest, len, tlen);
			dst_bus_width = stm32_mdma_get_width(chan, max_width);

			max_burst = tlen / max_width;
			best_burst = stm32_mdma_get_best_burst(len, tlen,
							       max_burst,
							       max_width);
			mdma_burst = ilog2(best_burst);

			ctcr |= STM32_MDMA_CTCR_DBURST(mdma_burst) |
				STM32_MDMA_CTCR_DSIZE(dst_bus_width) |
				STM32_MDMA_CTCR_DINCOS(dst_bus_width);

			if (dst_bus_width != src_bus_width)
				ctcr |= STM32_MDMA_CTCR_PKE;

			/* Prepare hardware descriptor */
			stm32_mdma_setup_hwdesc(chan, desc, DMA_MEM_TO_MEM, i,
						src + offset, dest + offset,
						xfer_count, ctcr, ctbr,
						i == count - 1, i == 0, false);
		}
	}

	desc->ccr = ccr;

	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static void stm32_mdma_dump_reg(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);

	dev_dbg(chan2dev(chan), "CCR:     0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id)));
	dev_dbg(chan2dev(chan), "CTCR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTCR(chan->id)));
	dev_dbg(chan2dev(chan), "CBNDTR:  0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id)));
	dev_dbg(chan2dev(chan), "CSAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CSAR(chan->id)));
	dev_dbg(chan2dev(chan), "CDAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CDAR(chan->id)));
	dev_dbg(chan2dev(chan), "CBRUR:   0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CBRUR(chan->id)));
	dev_dbg(chan2dev(chan), "CLAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CLAR(chan->id)));
	dev_dbg(chan2dev(chan), "CTBR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CTBR(chan->id)));
	dev_dbg(chan2dev(chan), "CMAR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMAR(chan->id)));
	dev_dbg(chan2dev(chan), "CMDR:    0x%08x\n",
		stm32_mdma_read(dmadev, STM32_MDMA_CMDR(chan->id)));
}

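/*
 * Program the channel registers from the first hardware descriptor,
 * clear any stale status flags, then set the EN bit. Memory-to-memory
 * transfers (SWRM set) have no hardware request line, so they must also
 * be kicked off by software through the SWRQ bit.
 */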
static void stm32_mdma_start_transfer(struct stm32_mdma_chan *chan)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_mdma_hwdesc *hwdesc;
	u32 id = chan->id;
	u32 status, reg;

	vdesc = vchan_next_desc(&chan->vchan);
	if (!vdesc) {
		chan->desc = NULL;
		return;
	}

	list_del(&vdesc->node);

	chan->desc = to_stm32_mdma_desc(vdesc);
	hwdesc = chan->desc->node[0].hwdesc;
	chan->curr_hwdesc = 0;

	stm32_mdma_write(dmadev, STM32_MDMA_CCR(id), chan->desc->ccr);
	stm32_mdma_write(dmadev, STM32_MDMA_CTCR(id), hwdesc->ctcr);
	stm32_mdma_write(dmadev, STM32_MDMA_CBNDTR(id), hwdesc->cbndtr);
	stm32_mdma_write(dmadev, STM32_MDMA_CSAR(id), hwdesc->csar);
	stm32_mdma_write(dmadev, STM32_MDMA_CDAR(id), hwdesc->cdar);
	stm32_mdma_write(dmadev, STM32_MDMA_CBRUR(id), hwdesc->cbrur);
	stm32_mdma_write(dmadev, STM32_MDMA_CLAR(id), hwdesc->clar);
	stm32_mdma_write(dmadev, STM32_MDMA_CTBR(id), hwdesc->ctbr);
	stm32_mdma_write(dmadev, STM32_MDMA_CMAR(id), hwdesc->cmar);
	stm32_mdma_write(dmadev, STM32_MDMA_CMDR(id), hwdesc->cmdr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(id), status);

	stm32_mdma_dump_reg(chan);

	/* Start DMA */
	stm32_mdma_set_bits(dmadev, STM32_MDMA_CCR(id), STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM) {
		reg = STM32_MDMA_CCR(id);
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);
	}

	chan->busy = true;

	dev_dbg(chan2dev(chan), "vchan %pK: started\n", &chan->vchan);
}

static void stm32_mdma_issue_pending(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (!vchan_issue_pending(&chan->vchan))
		goto end;

	dev_dbg(chan2dev(chan), "vchan %pK: issued\n", &chan->vchan);

	if (!chan->desc && !chan->busy)
		stm32_mdma_start_transfer(chan);

end:
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_mdma_pause(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	ret = stm32_mdma_disable_chan(chan);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	if (!ret)
		dev_dbg(chan2dev(chan), "vchan %pK: pause\n", &chan->vchan);

	return ret;
}

static int stm32_mdma_resume(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc;
	unsigned long flags;
	u32 status, reg;

	hwdesc = chan->desc->node[chan->curr_hwdesc].hwdesc;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	/* Re-configure control register */
	stm32_mdma_write(dmadev, STM32_MDMA_CCR(chan->id), chan->desc->ccr);

	/* Clear interrupt status if it is there */
	status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
	if (status)
		stm32_mdma_set_bits(dmadev, STM32_MDMA_CIFCR(chan->id), status);

	stm32_mdma_dump_reg(chan);

	/* Re-start DMA */
	reg = STM32_MDMA_CCR(chan->id);
	stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_EN);

	/* Set SW request in case of MEM2MEM transfer */
	if (hwdesc->ctcr & STM32_MDMA_CTCR_SWRM)
		stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CCR_SWRQ);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	dev_dbg(chan2dev(chan), "vchan %pK: resume\n", &chan->vchan);

	return 0;
}

static int stm32_mdma_terminate_all(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (chan->desc) {
		vchan_terminate_vdesc(&chan->desc->vdesc);
		if (chan->busy)
			stm32_mdma_stop(chan);
		chan->desc = NULL;
	}
	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_mdma_synchronize(struct dma_chan *c)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	vchan_synchronize(&chan->vchan);
}

static int stm32_mdma_slave_config(struct dma_chan *c,
				   struct dma_slave_config *config)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);

	memcpy(&chan->dma_config, config, sizeof(*config));

	return 0;
}

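/*
 * The residue is the sum of the BNDT byte counts of the descriptors
 * that have not started yet plus whatever is left of the current block.
 * It is rounded up to the next memory-burst multiple, as the hardware
 * counter may only reflect completed bursts.
 */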
static size_t stm32_mdma_desc_residue(struct stm32_mdma_chan *chan,
				      struct stm32_mdma_desc *desc,
				      u32 curr_hwdesc)
{
	struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
	struct stm32_mdma_hwdesc *hwdesc = desc->node[0].hwdesc;
	u32 cbndtr, residue, modulo, burst_size;
	int i;

	residue = 0;
	for (i = curr_hwdesc + 1; i < desc->count; i++) {
		hwdesc = desc->node[i].hwdesc;
		residue += STM32_MDMA_CBNDTR_BNDT(hwdesc->cbndtr);
	}
	cbndtr = stm32_mdma_read(dmadev, STM32_MDMA_CBNDTR(chan->id));
	residue += cbndtr & STM32_MDMA_CBNDTR_BNDT_MASK;

	if (!chan->mem_burst)
		return residue;

	burst_size = chan->mem_burst * chan->mem_width;
	modulo = residue % burst_size;
	if (modulo)
		residue = residue - modulo + burst_size;

	return residue;
}

static enum dma_status stm32_mdma_tx_status(struct dma_chan *c,
					    dma_cookie_t cookie,
					    struct dma_tx_state *state)
{
	struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue = 0;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);

	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie)
		residue = stm32_mdma_desc_residue(chan, chan->desc,
						  chan->curr_hwdesc);
	else if (vdesc)
		residue = stm32_mdma_desc_residue(chan,
						  to_stm32_mdma_desc(vdesc), 0);
	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static void stm32_mdma_xfer_end(struct stm32_mdma_chan *chan)
{
	vchan_cookie_complete(&chan->desc->vdesc);
	chan->desc = NULL;
	chan->busy = false;

        /* Start the next transfer if this channel has a pending descriptor */
1342        stm32_mdma_start_transfer(chan);
1343}
1344
1345static irqreturn_t stm32_mdma_irq_handler(int irq, void *devid)
1346{
        struct stm32_mdma_device *dmadev = devid;
        struct stm32_mdma_chan *chan;
1349        u32 reg, id, ien, status, flag;
1350
1351        /* Find out which channel generates the interrupt */
1352        status = readl_relaxed(dmadev->base + STM32_MDMA_GISR0);
1353        if (status) {
1354                id = __ffs(status);
1355        } else {
1356                status = readl_relaxed(dmadev->base + STM32_MDMA_GISR1);
1357                if (!status) {
1358                        dev_dbg(mdma2dev(dmadev), "spurious it\n");
1359                        return IRQ_NONE;
1360                }
1361                id = __ffs(status);
                /*
                 * GISR0 reports channels 0 to 31, GISR1 reports
                 * channels 32 to 62, hence the offset.
                 */
1366                id += 32;
1367        }
1368
        chan = &dmadev->chan[id];
1374
1375        /* Handle interrupt for the channel */
1376        spin_lock(&chan->vchan.lock);
1377        status = stm32_mdma_read(dmadev, STM32_MDMA_CISR(chan->id));
1378        ien = stm32_mdma_read(dmadev, STM32_MDMA_CCR(chan->id));
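        /*
         * In CCR the interrupt-enable bits sit one position above the
         * matching CISR flags (bit 0 carries EN), so mask and shift them
         * down before comparing with the status.
         */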
1379        ien &= STM32_MDMA_CCR_IRQ_MASK;
1380        ien >>= 1;
1381
1382        if (!(status & ien)) {
1383                spin_unlock(&chan->vchan.lock);
1384                dev_dbg(chan2dev(chan),
1385                        "spurious it (status=0x%04x, ien=0x%04x)\n",
1386                        status, ien);
1387                return IRQ_NONE;
1388        }
1389
1390        flag = __ffs(status & ien);
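        /*
         * Only the lowest pending enabled flag is handled here; any other
         * flag still set keeps the interrupt asserted and is processed on
         * the next invocation.
         */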
1391        reg = STM32_MDMA_CIFCR(chan->id);
1392
1393        switch (1 << flag) {
1394        case STM32_MDMA_CISR_TEIF:
1395                id = chan->id;
1396                status = readl_relaxed(dmadev->base + STM32_MDMA_CESR(id));
1397                dev_err(chan2dev(chan), "Transfer Err: stat=0x%08x\n", status);
1398                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CTEIF);
1399                break;
1400
1401        case STM32_MDMA_CISR_CTCIF:
1402                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CCTCIF);
1403                stm32_mdma_xfer_end(chan);
1404                break;
1405
1406        case STM32_MDMA_CISR_BRTIF:
1407                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBRTIF);
1408                break;
1409
1410        case STM32_MDMA_CISR_BTIF:
1411                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CBTIF);
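                /* One block is done: advance the running hwdesc index */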
1412                chan->curr_hwdesc++;
1413                if (chan->desc && chan->desc->cyclic) {
1414                        if (chan->curr_hwdesc == chan->desc->count)
1415                                chan->curr_hwdesc = 0;
1416                        vchan_cyclic_callback(&chan->desc->vdesc);
1417                }
1418                break;
1419
1420        case STM32_MDMA_CISR_TCIF:
1421                stm32_mdma_set_bits(dmadev, reg, STM32_MDMA_CIFCR_CLTCIF);
1422                break;
1423
1424        default:
1425                dev_err(chan2dev(chan), "it %d unhandled (status=0x%04x)\n",
1426                        1 << flag, status);
1427        }
1428
1429        spin_unlock(&chan->vchan.lock);
1430
        return IRQ_HANDLED;
1433}
1434
1435static int stm32_mdma_alloc_chan_resources(struct dma_chan *c)
1436{
1437        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1438        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1439        int ret;
1440
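        /*
         * Hardware descriptors are fetched by the controller itself when
         * it walks the linked list, so they come from a DMA pool aligned
         * as the hwdesc layout requires.
         */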
1441        chan->desc_pool = dmam_pool_create(dev_name(&c->dev->device),
1442                                           c->device->dev,
1443                                           sizeof(struct stm32_mdma_hwdesc),
1444                                          __alignof__(struct stm32_mdma_hwdesc),
1445                                           0);
1446        if (!chan->desc_pool) {
1447                dev_err(chan2dev(chan), "failed to allocate descriptor pool\n");
1448                return -ENOMEM;
1449        }
1450
        ret = pm_runtime_resume_and_get(dmadev->ddev.dev);
        if (ret < 0)
                return ret;
1454
1455        ret = stm32_mdma_disable_chan(chan);
1456        if (ret < 0)
1457                pm_runtime_put(dmadev->ddev.dev);
1458
1459        return ret;
1460}
1461
1462static void stm32_mdma_free_chan_resources(struct dma_chan *c)
1463{
1464        struct stm32_mdma_chan *chan = to_stm32_mdma_chan(c);
1465        struct stm32_mdma_device *dmadev = stm32_mdma_get_dev(chan);
1466        unsigned long flags;
1467
1468        dev_dbg(chan2dev(chan), "Freeing channel %d\n", chan->id);
1469
1470        if (chan->busy) {
1471                spin_lock_irqsave(&chan->vchan.lock, flags);
1472                stm32_mdma_stop(chan);
1473                chan->desc = NULL;
1474                spin_unlock_irqrestore(&chan->vchan.lock, flags);
1475        }
1476
1477        pm_runtime_put(dmadev->ddev.dev);
1478        vchan_free_chan_resources(to_virt_chan(c));
1479        dmam_pool_destroy(chan->desc_pool);
1480        chan->desc_pool = NULL;
1481}
1482
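/*
 * Translate the five specifier cells of a client "dmas" phandle into a
 * channel configuration. A consumer node would look roughly like this
 * (the cell values below are purely illustrative):
 *
 *      dmas = <&mdma1 22 0x2 0x40002 0x0 0x0>;
 *      dma-names = "tx";
 *
 * Cells: request line, priority level, transfer configuration, mask
 * address, mask data.
 */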
1483static struct dma_chan *stm32_mdma_of_xlate(struct of_phandle_args *dma_spec,
1484                                            struct of_dma *ofdma)
1485{
1486        struct stm32_mdma_device *dmadev = ofdma->of_dma_data;
1487        struct stm32_mdma_chan *chan;
1488        struct dma_chan *c;
1489        struct stm32_mdma_chan_config config;
1490
1491        if (dma_spec->args_count < 5) {
1492                dev_err(mdma2dev(dmadev), "Bad number of args\n");
1493                return NULL;
1494        }
1495
1496        config.request = dma_spec->args[0];
1497        config.priority_level = dma_spec->args[1];
1498        config.transfer_config = dma_spec->args[2];
1499        config.mask_addr = dma_spec->args[3];
1500        config.mask_data = dma_spec->args[4];
1501
1502        if (config.request >= dmadev->nr_requests) {
1503                dev_err(mdma2dev(dmadev), "Bad request line\n");
1504                return NULL;
1505        }
1506
1507        if (config.priority_level > STM32_MDMA_VERY_HIGH_PRIORITY) {
1508                dev_err(mdma2dev(dmadev), "Priority level not supported\n");
1509                return NULL;
1510        }
1511
1512        c = dma_get_any_slave_channel(&dmadev->ddev);
1513        if (!c) {
1514                dev_err(mdma2dev(dmadev), "No more channels available\n");
1515                return NULL;
1516        }
1517
1518        chan = to_stm32_mdma_chan(c);
1519        chan->chan_config = config;
1520
1521        return c;
1522}
1523
1524static const struct of_device_id stm32_mdma_of_match[] = {
1525        { .compatible = "st,stm32h7-mdma", },
1526        { /* sentinel */ },
1527};
1528MODULE_DEVICE_TABLE(of, stm32_mdma_of_match);
1529
1530static int stm32_mdma_probe(struct platform_device *pdev)
1531{
1532        struct stm32_mdma_chan *chan;
1533        struct stm32_mdma_device *dmadev;
1534        struct dma_device *dd;
1535        struct device_node *of_node;
1537        struct reset_control *rst;
1538        u32 nr_channels, nr_requests;
1539        int i, count, ret;
1540
1541        of_node = pdev->dev.of_node;
1542        if (!of_node)
1543                return -ENODEV;
1544
1545        ret = device_property_read_u32(&pdev->dev, "dma-channels",
1546                                       &nr_channels);
1547        if (ret) {
1548                nr_channels = STM32_MDMA_MAX_CHANNELS;
                dev_warn(&pdev->dev, "MDMA defaulting to %i channels\n",
1550                         nr_channels);
1551        }
1552
1553        ret = device_property_read_u32(&pdev->dev, "dma-requests",
1554                                       &nr_requests);
1555        if (ret) {
1556                nr_requests = STM32_MDMA_MAX_REQUESTS;
                dev_warn(&pdev->dev, "MDMA defaulting to %i request lines\n",
1558                         nr_requests);
1559        }
1560
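        /* "st,ahb-addr-masks" is optional; a missing property means no masks */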
1561        count = device_property_count_u32(&pdev->dev, "st,ahb-addr-masks");
1562        if (count < 0)
1563                count = 0;
1564
        dmadev = devm_kzalloc(&pdev->dev,
                              struct_size(dmadev, ahb_addr_masks, count),
                              GFP_KERNEL);
1567        if (!dmadev)
1568                return -ENOMEM;
1569
1570        dmadev->nr_channels = nr_channels;
1571        dmadev->nr_requests = nr_requests;
1572        device_property_read_u32_array(&pdev->dev, "st,ahb-addr-masks",
1573                                       dmadev->ahb_addr_masks,
1574                                       count);
1575        dmadev->nr_ahb_addr_masks = count;
1576
        dmadev->base = devm_platform_ioremap_resource(pdev, 0);
        if (IS_ERR(dmadev->base))
                return PTR_ERR(dmadev->base);
1581
        dmadev->clk = devm_clk_get(&pdev->dev, NULL);
        if (IS_ERR(dmadev->clk))
                return dev_err_probe(&pdev->dev, PTR_ERR(dmadev->clk),
                                     "Missing clock controller\n");
1589
1590        ret = clk_prepare_enable(dmadev->clk);
1591        if (ret < 0) {
1592                dev_err(&pdev->dev, "clk_prep_enable error: %d\n", ret);
1593                return ret;
1594        }
1595
1596        rst = devm_reset_control_get(&pdev->dev, NULL);
1597        if (IS_ERR(rst)) {
1598                ret = PTR_ERR(rst);
1599                if (ret == -EPROBE_DEFER)
1600                        goto err_clk;
1601        } else {
1602                reset_control_assert(rst);
1603                udelay(2);
1604                reset_control_deassert(rst);
1605        }
1606
1607        dd = &dmadev->ddev;
1608        dma_cap_set(DMA_SLAVE, dd->cap_mask);
1609        dma_cap_set(DMA_PRIVATE, dd->cap_mask);
1610        dma_cap_set(DMA_CYCLIC, dd->cap_mask);
1611        dma_cap_set(DMA_MEMCPY, dd->cap_mask);
1612        dd->device_alloc_chan_resources = stm32_mdma_alloc_chan_resources;
1613        dd->device_free_chan_resources = stm32_mdma_free_chan_resources;
1614        dd->device_tx_status = stm32_mdma_tx_status;
1615        dd->device_issue_pending = stm32_mdma_issue_pending;
1616        dd->device_prep_slave_sg = stm32_mdma_prep_slave_sg;
1617        dd->device_prep_dma_cyclic = stm32_mdma_prep_dma_cyclic;
1618        dd->device_prep_dma_memcpy = stm32_mdma_prep_dma_memcpy;
1619        dd->device_config = stm32_mdma_slave_config;
1620        dd->device_pause = stm32_mdma_pause;
1621        dd->device_resume = stm32_mdma_resume;
1622        dd->device_terminate_all = stm32_mdma_terminate_all;
1623        dd->device_synchronize = stm32_mdma_synchronize;
1624        dd->descriptor_reuse = true;
1625
1626        dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1627                BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1628                BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1629                BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1630        dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
1631                BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
1632                BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) |
1633                BIT(DMA_SLAVE_BUSWIDTH_8_BYTES);
1634        dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV) |
1635                BIT(DMA_MEM_TO_MEM);
1636        dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
1637        dd->max_burst = STM32_MDMA_MAX_BURST;
1638        dd->dev = &pdev->dev;
1639        INIT_LIST_HEAD(&dd->channels);
1640
1641        for (i = 0; i < dmadev->nr_channels; i++) {
1642                chan = &dmadev->chan[i];
1643                chan->id = i;
1644                chan->vchan.desc_free = stm32_mdma_desc_free;
1645                vchan_init(&chan->vchan, dd);
1646        }
1647
1648        dmadev->irq = platform_get_irq(pdev, 0);
1649        if (dmadev->irq < 0) {
1650                ret = dmadev->irq;
1651                goto err_clk;
1652        }
1653
1654        ret = devm_request_irq(&pdev->dev, dmadev->irq, stm32_mdma_irq_handler,
1655                               0, dev_name(&pdev->dev), dmadev);
1656        if (ret) {
1657                dev_err(&pdev->dev, "failed to request IRQ\n");
1658                goto err_clk;
1659        }
1660
1661        ret = dmaenginem_async_device_register(dd);
1662        if (ret)
1663                goto err_clk;
1664
1665        ret = of_dma_controller_register(of_node, stm32_mdma_of_xlate, dmadev);
1666        if (ret < 0) {
1667                dev_err(&pdev->dev,
                        "STM32 MDMA OF registration failed %d\n", ret);
1669                goto err_clk;
1670        }
1671
1672        platform_set_drvdata(pdev, dmadev);
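        /*
         * The clock is already enabled: mark the device active, then pair
         * get_noresume/put so the usage count drops back to zero and
         * runtime PM can gate the clock once the controller is idle.
         */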
1673        pm_runtime_set_active(&pdev->dev);
1674        pm_runtime_enable(&pdev->dev);
1675        pm_runtime_get_noresume(&pdev->dev);
1676        pm_runtime_put(&pdev->dev);
1677
1678        dev_info(&pdev->dev, "STM32 MDMA driver registered\n");
1679
1680        return 0;
1681
1682err_clk:
1683        clk_disable_unprepare(dmadev->clk);
1684
1685        return ret;
1686}
1687
1688#ifdef CONFIG_PM
1689static int stm32_mdma_runtime_suspend(struct device *dev)
1690{
1691        struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1692
1693        clk_disable_unprepare(dmadev->clk);
1694
1695        return 0;
1696}
1697
1698static int stm32_mdma_runtime_resume(struct device *dev)
1699{
1700        struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1701        int ret;
1702
1703        ret = clk_prepare_enable(dmadev->clk);
1704        if (ret) {
1705                dev_err(dev, "failed to prepare_enable clock\n");
1706                return ret;
1707        }
1708
1709        return 0;
1710}
1711#endif
1712
1713#ifdef CONFIG_PM_SLEEP
1714static int stm32_mdma_pm_suspend(struct device *dev)
1715{
1716        struct stm32_mdma_device *dmadev = dev_get_drvdata(dev);
1717        u32 ccr, id;
1718        int ret;
1719
        ret = pm_runtime_resume_and_get(dev);
        if (ret < 0)
                return ret;
1723
1724        for (id = 0; id < dmadev->nr_channels; id++) {
1725                ccr = stm32_mdma_read(dmadev, STM32_MDMA_CCR(id));
                if (ccr & STM32_MDMA_CCR_EN) {
                        dev_warn(dev, "Suspend is prevented by Chan %i\n", id);
                        pm_runtime_put_sync(dev);
                        return -EBUSY;
                }
1730        }
1731
1732        pm_runtime_put_sync(dev);
1733
        return pm_runtime_force_suspend(dev);
1737}
1738
1739static int stm32_mdma_pm_resume(struct device *dev)
1740{
1741        return pm_runtime_force_resume(dev);
1742}
1743#endif
1744
1745static const struct dev_pm_ops stm32_mdma_pm_ops = {
1746        SET_SYSTEM_SLEEP_PM_OPS(stm32_mdma_pm_suspend, stm32_mdma_pm_resume)
1747        SET_RUNTIME_PM_OPS(stm32_mdma_runtime_suspend,
1748                           stm32_mdma_runtime_resume, NULL)
1749};
1750
1751static struct platform_driver stm32_mdma_driver = {
1752        .probe = stm32_mdma_probe,
1753        .driver = {
1754                .name = "stm32-mdma",
1755                .of_match_table = stm32_mdma_of_match,
1756                .pm = &stm32_mdma_pm_ops,
1757        },
1758};
1759
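/*
 * Register at subsys_initcall time so the DMA controller is available
 * before the client drivers that depend on it start probing.
 */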
1760static int __init stm32_mdma_init(void)
1761{
1762        return platform_driver_register(&stm32_mdma_driver);
1763}
1764
1765subsys_initcall(stm32_mdma_init);
1766
1767MODULE_DESCRIPTION("Driver for STM32 MDMA controller");
1768MODULE_AUTHOR("M'boumba Cedric Madianga <cedric.madianga@gmail.com>");
1769MODULE_AUTHOR("Pierre-Yves Mordret <pierre-yves.mordret@st.com>");
1770MODULE_LICENSE("GPL v2");
1771