linux/drivers/dma/stm32-dma.c
/*
 * Driver for STM32 DMA controller
 *
 * Inspired by dma-jz4740.c and tegra20-apb-dma.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2015
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 *
 * License terms:  GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/of_dma.h>
#include <linux/platform_device.h>
#include <linux/reset.h>
#include <linux/sched.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define STM32_DMA_LISR			0x0000 /* DMA Low Int Status Reg */
#define STM32_DMA_HISR			0x0004 /* DMA High Int Status Reg */
#define STM32_DMA_LIFCR			0x0008 /* DMA Low Int Flag Clear Reg */
#define STM32_DMA_HIFCR			0x000c /* DMA High Int Flag Clear Reg */
#define STM32_DMA_TCI			BIT(5) /* Transfer Complete Interrupt */
#define STM32_DMA_TEI			BIT(3) /* Transfer Error Interrupt */
#define STM32_DMA_DMEI			BIT(2) /* Direct Mode Error Interrupt */
#define STM32_DMA_FEI			BIT(0) /* FIFO Error Interrupt */

/* DMA Stream x Configuration Register */
#define STM32_DMA_SCR(x)		(0x0010 + 0x18 * (x)) /* x = 0..7 */
#define STM32_DMA_SCR_REQ(n)		((n & 0x7) << 25)
#define STM32_DMA_SCR_MBURST_MASK	GENMASK(24, 23)
#define STM32_DMA_SCR_MBURST(n)		((n & 0x3) << 23)
#define STM32_DMA_SCR_PBURST_MASK	GENMASK(22, 21)
#define STM32_DMA_SCR_PBURST(n)		((n & 0x3) << 21)
#define STM32_DMA_SCR_PL_MASK		GENMASK(17, 16)
#define STM32_DMA_SCR_PL(n)		((n & 0x3) << 16)
#define STM32_DMA_SCR_MSIZE_MASK	GENMASK(14, 13)
#define STM32_DMA_SCR_MSIZE(n)		((n & 0x3) << 13)
#define STM32_DMA_SCR_PSIZE_MASK	GENMASK(12, 11)
#define STM32_DMA_SCR_PSIZE(n)		((n & 0x3) << 11)
#define STM32_DMA_SCR_PSIZE_GET(n)	((n & STM32_DMA_SCR_PSIZE_MASK) >> 11)
#define STM32_DMA_SCR_DIR_MASK		GENMASK(7, 6)
#define STM32_DMA_SCR_DIR(n)		((n & 0x3) << 6)
#define STM32_DMA_SCR_CT		BIT(19) /* Target in double buffer */
#define STM32_DMA_SCR_DBM		BIT(18) /* Double Buffer Mode */
#define STM32_DMA_SCR_PINCOS		BIT(15) /* Peripheral inc offset size */
#define STM32_DMA_SCR_MINC		BIT(10) /* Memory increment mode */
#define STM32_DMA_SCR_PINC		BIT(9) /* Peripheral increment mode */
#define STM32_DMA_SCR_CIRC		BIT(8) /* Circular mode */
#define STM32_DMA_SCR_PFCTRL		BIT(5) /* Peripheral Flow Controller */
#define STM32_DMA_SCR_TCIE		BIT(4) /* Transfer Complete Int Enable */
#define STM32_DMA_SCR_TEIE		BIT(2) /* Transfer Error Int Enable */
#define STM32_DMA_SCR_DMEIE		BIT(1) /* Direct Mode Err Int Enable */
#define STM32_DMA_SCR_EN		BIT(0) /* Stream Enable */
#define STM32_DMA_SCR_CFG_MASK		(STM32_DMA_SCR_PINC \
					| STM32_DMA_SCR_MINC \
					| STM32_DMA_SCR_PINCOS \
					| STM32_DMA_SCR_PL_MASK)
#define STM32_DMA_SCR_IRQ_MASK		(STM32_DMA_SCR_TCIE \
					| STM32_DMA_SCR_TEIE \
					| STM32_DMA_SCR_DMEIE)

/* DMA Stream x number of data register */
#define STM32_DMA_SNDTR(x)		(0x0014 + 0x18 * (x))

/* DMA stream peripheral address register */
#define STM32_DMA_SPAR(x)		(0x0018 + 0x18 * (x))

/* DMA stream x memory 0 address register */
#define STM32_DMA_SM0AR(x)		(0x001c + 0x18 * (x))

/* DMA stream x memory 1 address register */
#define STM32_DMA_SM1AR(x)		(0x0020 + 0x18 * (x))

/* DMA stream x FIFO control register */
#define STM32_DMA_SFCR(x)		(0x0024 + 0x18 * (x))
#define STM32_DMA_SFCR_FTH_MASK		GENMASK(1, 0)
#define STM32_DMA_SFCR_FTH(n)		(n & STM32_DMA_SFCR_FTH_MASK)
#define STM32_DMA_SFCR_FEIE		BIT(7) /* FIFO error interrupt enable */
#define STM32_DMA_SFCR_DMDIS		BIT(2) /* Direct mode disable */
#define STM32_DMA_SFCR_MASK		(STM32_DMA_SFCR_FEIE \
					| STM32_DMA_SFCR_DMDIS)

/* DMA direction */
#define STM32_DMA_DEV_TO_MEM		0x00
#define STM32_DMA_MEM_TO_DEV		0x01
#define STM32_DMA_MEM_TO_MEM		0x02

/* DMA priority level */
#define STM32_DMA_PRIORITY_LOW		0x00
#define STM32_DMA_PRIORITY_MEDIUM	0x01
#define STM32_DMA_PRIORITY_HIGH		0x02
#define STM32_DMA_PRIORITY_VERY_HIGH	0x03

/* DMA FIFO threshold selection */
#define STM32_DMA_FIFO_THRESHOLD_1QUARTERFULL		0x00
#define STM32_DMA_FIFO_THRESHOLD_HALFFULL		0x01
#define STM32_DMA_FIFO_THRESHOLD_3QUARTERSFULL		0x02
#define STM32_DMA_FIFO_THRESHOLD_FULL			0x03

#define STM32_DMA_MAX_DATA_ITEMS	0xffff
#define STM32_DMA_MAX_CHANNELS		0x08
#define STM32_DMA_MAX_REQUEST_ID	0x08
#define STM32_DMA_MAX_DATA_PARAM	0x03
enum stm32_dma_width {
	STM32_DMA_BYTE,
	STM32_DMA_HALF_WORD,
	STM32_DMA_WORD,
};

enum stm32_dma_burst_size {
	STM32_DMA_BURST_SINGLE,
	STM32_DMA_BURST_INCR4,
	STM32_DMA_BURST_INCR8,
	STM32_DMA_BURST_INCR16,
};

struct stm32_dma_cfg {
	u32 channel_id;
	u32 request_line;
	u32 stream_config;
	u32 threshold;
};

struct stm32_dma_chan_reg {
	u32 dma_lisr;
	u32 dma_hisr;
	u32 dma_lifcr;
	u32 dma_hifcr;
	u32 dma_scr;
	u32 dma_sndtr;
	u32 dma_spar;
	u32 dma_sm0ar;
	u32 dma_sm1ar;
	u32 dma_sfcr;
};

struct stm32_dma_sg_req {
	u32 len;
	struct stm32_dma_chan_reg chan_reg;
};

struct stm32_dma_desc {
	struct virt_dma_desc vdesc;
	bool cyclic;
	u32 num_sgs;
	struct stm32_dma_sg_req sg_req[];
};

struct stm32_dma_chan {
	struct virt_dma_chan vchan;
	bool config_init;
	bool busy;
	u32 id;
	u32 irq;
	struct stm32_dma_desc *desc;
	u32 next_sg;
	struct dma_slave_config dma_sconfig;
	struct stm32_dma_chan_reg chan_reg;
};

struct stm32_dma_device {
	struct dma_device ddev;
	void __iomem *base;
	struct clk *clk;
	struct reset_control *rst;
	bool mem2mem;
	struct stm32_dma_chan chan[STM32_DMA_MAX_CHANNELS];
};

static struct stm32_dma_device *stm32_dma_get_dev(struct stm32_dma_chan *chan)
{
	return container_of(chan->vchan.chan.device, struct stm32_dma_device,
			    ddev);
}

static struct stm32_dma_chan *to_stm32_dma_chan(struct dma_chan *c)
{
	return container_of(c, struct stm32_dma_chan, vchan.chan);
}

static struct stm32_dma_desc *to_stm32_dma_desc(struct virt_dma_desc *vdesc)
{
	return container_of(vdesc, struct stm32_dma_desc, vdesc);
}

static struct device *chan2dev(struct stm32_dma_chan *chan)
{
	return &chan->vchan.chan.dev->device;
}

static u32 stm32_dma_read(struct stm32_dma_device *dmadev, u32 reg)
{
	return readl_relaxed(dmadev->base + reg);
}

static void stm32_dma_write(struct stm32_dma_device *dmadev, u32 reg, u32 val)
{
	writel_relaxed(val, dmadev->base + reg);
}

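/*
 * Allocate a descriptor with room for num_sgs trailing sg_req entries
 * (stm32_dma_desc ends in a flexible array member). GFP_NOWAIT is used
 * because prep callbacks may run in atomic context.
 */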
static struct stm32_dma_desc *stm32_dma_alloc_desc(u32 num_sgs)
{
	return kzalloc(sizeof(struct stm32_dma_desc) +
		       sizeof(struct stm32_dma_sg_req) * num_sgs, GFP_NOWAIT);
}

static int stm32_dma_get_width(struct stm32_dma_chan *chan,
			       enum dma_slave_buswidth width)
{
	switch (width) {
	case DMA_SLAVE_BUSWIDTH_1_BYTE:
		return STM32_DMA_BYTE;
	case DMA_SLAVE_BUSWIDTH_2_BYTES:
		return STM32_DMA_HALF_WORD;
	case DMA_SLAVE_BUSWIDTH_4_BYTES:
		return STM32_DMA_WORD;
	default:
		dev_err(chan2dev(chan), "DMA bus width not supported\n");
		return -EINVAL;
	}
}

static int stm32_dma_get_burst(struct stm32_dma_chan *chan, u32 maxburst)
{
	switch (maxburst) {
	case 0:
	case 1:
		return STM32_DMA_BURST_SINGLE;
	case 4:
		return STM32_DMA_BURST_INCR4;
	case 8:
		return STM32_DMA_BURST_INCR8;
	case 16:
		return STM32_DMA_BURST_INCR16;
	default:
		dev_err(chan2dev(chan), "DMA burst size not supported\n");
		return -EINVAL;
	}
}

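/*
 * If neither side requests bursts the stream is left in direct mode
 * (FIFO bypassed) and only the direct mode error interrupt is enabled;
 * any burst request switches the stream to FIFO mode with the FIFO
 * error interrupt enabled instead.
 */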
static void stm32_dma_set_fifo_config(struct stm32_dma_chan *chan,
				      u32 src_maxburst, u32 dst_maxburst)
{
	chan->chan_reg.dma_sfcr &= ~STM32_DMA_SFCR_MASK;
	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_DMEIE;

	if ((!src_maxburst) && (!dst_maxburst)) {
		/* Using direct mode */
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DMEIE;
	} else {
		/* Using FIFO mode */
		chan->chan_reg.dma_sfcr |= STM32_DMA_SFCR_MASK;
	}
}

static int stm32_dma_slave_config(struct dma_chan *c,
				  struct dma_slave_config *config)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);

	memcpy(&chan->dma_sconfig, config, sizeof(*config));

	chan->config_init = true;

	return 0;
}

static u32 stm32_dma_irq_status(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 flags, dma_isr;

	/*
	 * Read "flags" from DMA_xISR register corresponding to the selected
	 * DMA channel at the correct bit offset inside that register.
	 *
	 * If (ch % 4) is 2 or 3, left shift the mask by 16 bits.
	 * If (ch % 4) is 1 or 3, additionally left shift the mask by 6 bits.
	 */
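	/*
	 * Worked example of the offset math below: for stream 5,
	 * (5 & 4) selects HISR, (5 & 2) << 3 = 0 and (5 & 1) * 6 = 6,
	 * so its flags occupy bits 6..11 of HISR.
	 */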

	if (chan->id & 4)
		dma_isr = stm32_dma_read(dmadev, STM32_DMA_HISR);
	else
		dma_isr = stm32_dma_read(dmadev, STM32_DMA_LISR);

	flags = dma_isr >> (((chan->id & 2) << 3) | ((chan->id & 1) * 6));

	return flags;
}

static void stm32_dma_irq_clear(struct stm32_dma_chan *chan, u32 flags)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_ifcr;

	/*
	 * Write "flags" to the DMA_xIFCR register corresponding to the
	 * selected DMA channel at the correct bit offset inside that
	 * register.
	 *
	 * If (ch % 4) is 2 or 3, left shift the mask by 16 bits.
	 * If (ch % 4) is 1 or 3, additionally left shift the mask by 6 bits.
	 */
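	/* DMA_xIFCR bits are write-1-to-clear. */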
	dma_ifcr = flags << (((chan->id & 2) << 3) | ((chan->id & 1) * 6));

	if (chan->id & 4)
		stm32_dma_write(dmadev, STM32_DMA_HIFCR, dma_ifcr);
	else
		stm32_dma_write(dmadev, STM32_DMA_LIFCR, dma_ifcr);
}

static int stm32_dma_disable_chan(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	unsigned long timeout = jiffies + msecs_to_jiffies(5000);
	u32 dma_scr, id;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

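	/*
	 * Clearing EN only requests the stop: the hardware keeps EN set
	 * until the ongoing transfer has drained, hence the polling loop
	 * below.
	 */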
	if (dma_scr & STM32_DMA_SCR_EN) {
		dma_scr &= ~STM32_DMA_SCR_EN;
		stm32_dma_write(dmadev, STM32_DMA_SCR(id), dma_scr);

		do {
			dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));
			dma_scr &= STM32_DMA_SCR_EN;
			if (!dma_scr)
				break;

			if (time_after_eq(jiffies, timeout)) {
				dev_err(chan2dev(chan), "%s: timeout!\n",
					__func__);
				return -EBUSY;
			}
			cond_resched();
		} while (1);
	}

	return 0;
}

static void stm32_dma_stop(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_scr, dma_sfcr, status;
	int ret;

	/* Disable interrupts */
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	dma_scr &= ~STM32_DMA_SCR_IRQ_MASK;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), dma_scr);
	dma_sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));
	dma_sfcr &= ~STM32_DMA_SFCR_FEIE;
	stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), dma_sfcr);

	/* Disable DMA */
	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		return;

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status) {
		dev_dbg(chan2dev(chan), "%s(): clearing interrupt: 0x%08x\n",
			__func__, status);
		stm32_dma_irq_clear(chan, status);
	}

	chan->busy = false;
}

static int stm32_dma_terminate_all(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;
	LIST_HEAD(head);

	spin_lock_irqsave(&chan->vchan.lock, flags);

	if (chan->busy) {
		stm32_dma_stop(chan);
		chan->desc = NULL;
	}

	vchan_get_all_descriptors(&chan->vchan, &head);
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
	vchan_dma_desc_free_list(&chan->vchan, &head);

	return 0;
}

static void stm32_dma_dump_reg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	u32 ndtr = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));
	u32 spar = stm32_dma_read(dmadev, STM32_DMA_SPAR(chan->id));
	u32 sm0ar = stm32_dma_read(dmadev, STM32_DMA_SM0AR(chan->id));
	u32 sm1ar = stm32_dma_read(dmadev, STM32_DMA_SM1AR(chan->id));
	u32 sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));

	dev_dbg(chan2dev(chan), "SCR:   0x%08x\n", scr);
	dev_dbg(chan2dev(chan), "NDTR:  0x%08x\n", ndtr);
	dev_dbg(chan2dev(chan), "SPAR:  0x%08x\n", spar);
	dev_dbg(chan2dev(chan), "SM0AR: 0x%08x\n", sm0ar);
	dev_dbg(chan2dev(chan), "SM1AR: 0x%08x\n", sm1ar);
	dev_dbg(chan2dev(chan), "SFCR:  0x%08x\n", sfcr);
}

static int stm32_dma_start_transfer(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct virt_dma_desc *vdesc;
	struct stm32_dma_sg_req *sg_req;
	struct stm32_dma_chan_reg *reg;
	u32 status;
	int ret;

	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		return ret;

	if (!chan->desc) {
		vdesc = vchan_next_desc(&chan->vchan);
		if (!vdesc)
			return -EPERM;

		chan->desc = to_stm32_dma_desc(vdesc);
		chan->next_sg = 0;
	}

	if (chan->next_sg == chan->desc->num_sgs)
		chan->next_sg = 0;

	sg_req = &chan->desc->sg_req[chan->next_sg];
	reg = &sg_req->chan_reg;

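	/* The stream is disabled at this point, so programming it is safe. */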
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);
	stm32_dma_write(dmadev, STM32_DMA_SPAR(chan->id), reg->dma_spar);
	stm32_dma_write(dmadev, STM32_DMA_SM0AR(chan->id), reg->dma_sm0ar);
	stm32_dma_write(dmadev, STM32_DMA_SFCR(chan->id), reg->dma_sfcr);
	stm32_dma_write(dmadev, STM32_DMA_SM1AR(chan->id), reg->dma_sm1ar);
	stm32_dma_write(dmadev, STM32_DMA_SNDTR(chan->id), reg->dma_sndtr);

	chan->next_sg++;

	/* Clear interrupt status if it is there */
	status = stm32_dma_irq_status(chan);
	if (status)
		stm32_dma_irq_clear(chan, status);

	stm32_dma_dump_reg(chan);

	/* Start DMA */
	reg->dma_scr |= STM32_DMA_SCR_EN;
	stm32_dma_write(dmadev, STM32_DMA_SCR(chan->id), reg->dma_scr);

	chan->busy = true;

	return 0;
}

static void stm32_dma_configure_next_sg(struct stm32_dma_chan *chan)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	struct stm32_dma_sg_req *sg_req;
	u32 dma_scr, dma_sm0ar, dma_sm1ar, id;

	id = chan->id;
	dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(id));

	if (dma_scr & STM32_DMA_SCR_DBM) {
		if (chan->next_sg == chan->desc->num_sgs)
			chan->next_sg = 0;

		sg_req = &chan->desc->sg_req[chan->next_sg];

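		/*
		 * CT names the memory target currently in use: when CT is
		 * set the hardware is transferring via SM1AR, so SM0AR is
		 * the one that may safely be reprogrammed, and vice versa.
		 */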
		if (dma_scr & STM32_DMA_SCR_CT) {
			dma_sm0ar = sg_req->chan_reg.dma_sm0ar;
			stm32_dma_write(dmadev, STM32_DMA_SM0AR(id), dma_sm0ar);
			dev_dbg(chan2dev(chan), "CT=1 <=> SM0AR: 0x%08x\n",
				stm32_dma_read(dmadev, STM32_DMA_SM0AR(id)));
		} else {
			dma_sm1ar = sg_req->chan_reg.dma_sm1ar;
			stm32_dma_write(dmadev, STM32_DMA_SM1AR(id), dma_sm1ar);
			dev_dbg(chan2dev(chan), "CT=0 <=> SM1AR: 0x%08x\n",
				stm32_dma_read(dmadev, STM32_DMA_SM1AR(id)));
		}

		chan->next_sg++;
	}
}

static void stm32_dma_handle_chan_done(struct stm32_dma_chan *chan)
{
	if (chan->desc) {
		if (chan->desc->cyclic) {
			vchan_cyclic_callback(&chan->desc->vdesc);
			stm32_dma_configure_next_sg(chan);
		} else {
			chan->busy = false;
			if (chan->next_sg == chan->desc->num_sgs) {
				list_del(&chan->desc->vdesc.node);
				vchan_cookie_complete(&chan->desc->vdesc);
				chan->desc = NULL;
			}
			stm32_dma_start_transfer(chan);
		}
	}
}

static irqreturn_t stm32_dma_chan_irq(int irq, void *devid)
{
	struct stm32_dma_chan *chan = devid;
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 status, scr, sfcr;

	spin_lock(&chan->vchan.lock);

	status = stm32_dma_irq_status(chan);
	scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
	sfcr = stm32_dma_read(dmadev, STM32_DMA_SFCR(chan->id));

	if ((status & STM32_DMA_TCI) && (scr & STM32_DMA_SCR_TCIE)) {
		stm32_dma_irq_clear(chan, STM32_DMA_TCI);
		stm32_dma_handle_chan_done(chan);
	} else {
		stm32_dma_irq_clear(chan, status);
		dev_err(chan2dev(chan), "DMA error: status=0x%08x\n", status);
	}

	spin_unlock(&chan->vchan.lock);

	return IRQ_HANDLED;
}

static void stm32_dma_issue_pending(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	if (!chan->busy) {
		if (vchan_issue_pending(&chan->vchan) && !chan->desc) {
			ret = stm32_dma_start_transfer(chan);
			if ((!ret) && (chan->desc->cyclic))
				stm32_dma_configure_next_sg(chan);
		}
	}
	spin_unlock_irqrestore(&chan->vchan.lock, flags);
}

static int stm32_dma_set_xfer_param(struct stm32_dma_chan *chan,
				    enum dma_transfer_direction direction,
				    enum dma_slave_buswidth *buswidth)
{
	enum dma_slave_buswidth src_addr_width, dst_addr_width;
	int src_bus_width, dst_bus_width;
	int src_burst_size, dst_burst_size;
	u32 src_maxburst, dst_maxburst;
	dma_addr_t src_addr, dst_addr;
	u32 dma_scr = 0;

	src_addr_width = chan->dma_sconfig.src_addr_width;
	dst_addr_width = chan->dma_sconfig.dst_addr_width;
	src_maxburst = chan->dma_sconfig.src_maxburst;
	dst_maxburst = chan->dma_sconfig.dst_maxburst;
	src_addr = chan->dma_sconfig.src_addr;
	dst_addr = chan->dma_sconfig.dst_addr;

	switch (direction) {
	case DMA_MEM_TO_DEV:
		dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;

		dst_burst_size = stm32_dma_get_burst(chan, dst_maxburst);
		if (dst_burst_size < 0)
			return dst_burst_size;

		if (!src_addr_width)
			src_addr_width = dst_addr_width;

		src_bus_width = stm32_dma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;

		src_burst_size = stm32_dma_get_burst(chan, src_maxburst);
		if (src_burst_size < 0)
			return src_burst_size;

		dma_scr = STM32_DMA_SCR_DIR(STM32_DMA_MEM_TO_DEV) |
			STM32_DMA_SCR_PSIZE(dst_bus_width) |
			STM32_DMA_SCR_MSIZE(src_bus_width) |
			STM32_DMA_SCR_PBURST(dst_burst_size) |
			STM32_DMA_SCR_MBURST(src_burst_size);

		chan->chan_reg.dma_spar = dst_addr;
		*buswidth = dst_addr_width;
		break;

	case DMA_DEV_TO_MEM:
		src_bus_width = stm32_dma_get_width(chan, src_addr_width);
		if (src_bus_width < 0)
			return src_bus_width;

		src_burst_size = stm32_dma_get_burst(chan, src_maxburst);
		if (src_burst_size < 0)
			return src_burst_size;

		if (!dst_addr_width)
			dst_addr_width = src_addr_width;

		dst_bus_width = stm32_dma_get_width(chan, dst_addr_width);
		if (dst_bus_width < 0)
			return dst_bus_width;

		dst_burst_size = stm32_dma_get_burst(chan, dst_maxburst);
		if (dst_burst_size < 0)
			return dst_burst_size;

		dma_scr = STM32_DMA_SCR_DIR(STM32_DMA_DEV_TO_MEM) |
			STM32_DMA_SCR_PSIZE(src_bus_width) |
			STM32_DMA_SCR_MSIZE(dst_bus_width) |
			STM32_DMA_SCR_PBURST(src_burst_size) |
			STM32_DMA_SCR_MBURST(dst_burst_size);

		chan->chan_reg.dma_spar = src_addr;
		*buswidth = src_addr_width;
		break;

	default:
		dev_err(chan2dev(chan), "DMA direction not supported\n");
		return -EINVAL;
	}

	stm32_dma_set_fifo_config(chan, src_maxburst, dst_maxburst);

	chan->chan_reg.dma_scr &= ~(STM32_DMA_SCR_DIR_MASK |
			STM32_DMA_SCR_PSIZE_MASK | STM32_DMA_SCR_MSIZE_MASK |
			STM32_DMA_SCR_PBURST_MASK | STM32_DMA_SCR_MBURST_MASK);
	chan->chan_reg.dma_scr |= dma_scr;

	return 0;
}

static void stm32_dma_clear_reg(struct stm32_dma_chan_reg *regs)
{
	memset(regs, 0, sizeof(struct stm32_dma_chan_reg));
}

static struct dma_async_tx_descriptor *stm32_dma_prep_slave_sg(
	struct dma_chan *c, struct scatterlist *sgl,
	u32 sg_len, enum dma_transfer_direction direction,
	unsigned long flags, void *context)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_desc *desc;
	struct scatterlist *sg;
	enum dma_slave_buswidth buswidth;
	u32 nb_data_items;
	int i, ret;

	if (!chan->config_init) {
		dev_err(chan2dev(chan), "dma channel is not configured\n");
		return NULL;
	}

	if (sg_len < 1) {
		dev_err(chan2dev(chan), "Invalid segment length %u\n", sg_len);
		return NULL;
	}

	desc = stm32_dma_alloc_desc(sg_len);
	if (!desc)
		return NULL;

	ret = stm32_dma_set_xfer_param(chan, direction, &buswidth);
	if (ret < 0)
		goto err;

	/* Set peripheral flow controller */
	if (chan->dma_sconfig.device_fc)
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_PFCTRL;
	else
		chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

	for_each_sg(sgl, sg, sg_len, i) {
		desc->sg_req[i].len = sg_dma_len(sg);

		nb_data_items = desc->sg_req[i].len / buswidth;
		if (nb_data_items > STM32_DMA_MAX_DATA_ITEMS) {
			dev_err(chan2dev(chan), "nb items not supported\n");
			goto err;
		}

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr;
		desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr;
		desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
		desc->sg_req[i].chan_reg.dma_sm0ar = sg_dma_address(sg);
		desc->sg_req[i].chan_reg.dma_sm1ar = sg_dma_address(sg);
		desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
	}

	desc->num_sgs = sg_len;
	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);

err:
	kfree(desc);
	return NULL;
}

static struct dma_async_tx_descriptor *stm32_dma_prep_dma_cyclic(
	struct dma_chan *c, dma_addr_t buf_addr, size_t buf_len,
	size_t period_len, enum dma_transfer_direction direction,
	unsigned long flags)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_desc *desc;
	enum dma_slave_buswidth buswidth;
	u32 num_periods, nb_data_items;
	int i, ret;

	if (!buf_len || !period_len) {
		dev_err(chan2dev(chan), "Invalid buffer/period len\n");
		return NULL;
	}

	if (!chan->config_init) {
		dev_err(chan2dev(chan), "dma channel is not configured\n");
		return NULL;
	}

	if (buf_len % period_len) {
		dev_err(chan2dev(chan), "buf_len not multiple of period_len\n");
		return NULL;
	}

	/*
	 * Requests may only be queued while the DMA is not running: the
	 * driver loops over all queued requests, and once the DMA has been
	 * started, new requests can be issued only after terminating it.
	 */
	if (chan->busy) {
		dev_err(chan2dev(chan), "Request not allowed when dma busy\n");
		return NULL;
	}

	ret = stm32_dma_set_xfer_param(chan, direction, &buswidth);
	if (ret < 0)
		return NULL;

	nb_data_items = period_len / buswidth;
	if (nb_data_items > STM32_DMA_MAX_DATA_ITEMS) {
		dev_err(chan2dev(chan), "number of items not supported\n");
		return NULL;
	}

	/*
	 * A single period can use plain circular mode; with several periods,
	 * double buffer mode lets stm32_dma_configure_next_sg() reload the
	 * idle memory target between period interrupts.
	 */
	if (buf_len == period_len)
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_CIRC;
	else
		chan->chan_reg.dma_scr |= STM32_DMA_SCR_DBM;

	/* Clear periph ctrl if client set it */
	chan->chan_reg.dma_scr &= ~STM32_DMA_SCR_PFCTRL;

	num_periods = buf_len / period_len;

	desc = stm32_dma_alloc_desc(num_periods);
	if (!desc)
		return NULL;

	for (i = 0; i < num_periods; i++) {
		desc->sg_req[i].len = period_len;

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr = chan->chan_reg.dma_scr;
		desc->sg_req[i].chan_reg.dma_sfcr = chan->chan_reg.dma_sfcr;
		desc->sg_req[i].chan_reg.dma_spar = chan->chan_reg.dma_spar;
		desc->sg_req[i].chan_reg.dma_sm0ar = buf_addr;
		desc->sg_req[i].chan_reg.dma_sm1ar = buf_addr;
		desc->sg_req[i].chan_reg.dma_sndtr = nb_data_items;
		buf_addr += period_len;
	}

	desc->num_sgs = num_periods;
	desc->cyclic = true;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

static struct dma_async_tx_descriptor *stm32_dma_prep_dma_memcpy(
	struct dma_chan *c, dma_addr_t dest,
	dma_addr_t src, size_t len, unsigned long flags)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	u32 num_sgs;
	struct stm32_dma_desc *desc;
	size_t xfer_count, offset;
	int i;

	num_sgs = DIV_ROUND_UP(len, STM32_DMA_MAX_DATA_ITEMS);
	desc = stm32_dma_alloc_desc(num_sgs);
	if (!desc)
		return NULL;

	for (offset = 0, i = 0; offset < len; offset += xfer_count, i++) {
		xfer_count = min_t(size_t, len - offset,
				   STM32_DMA_MAX_DATA_ITEMS);

		desc->sg_req[i].len = xfer_count;

		stm32_dma_clear_reg(&desc->sg_req[i].chan_reg);
		desc->sg_req[i].chan_reg.dma_scr =
			STM32_DMA_SCR_DIR(STM32_DMA_MEM_TO_MEM) |
			STM32_DMA_SCR_MINC |
			STM32_DMA_SCR_PINC |
			STM32_DMA_SCR_TCIE |
			STM32_DMA_SCR_TEIE;
		desc->sg_req[i].chan_reg.dma_sfcr = STM32_DMA_SFCR_DMDIS |
			STM32_DMA_SFCR_FTH(STM32_DMA_FIFO_THRESHOLD_FULL) |
			STM32_DMA_SFCR_FEIE;
		desc->sg_req[i].chan_reg.dma_spar = src + offset;
		desc->sg_req[i].chan_reg.dma_sm0ar = dest + offset;
		desc->sg_req[i].chan_reg.dma_sndtr = xfer_count;
	}

	desc->num_sgs = num_sgs;
	desc->cyclic = false;

	return vchan_tx_prep(&chan->vchan, &desc->vdesc, flags);
}

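/*
 * Residue is the sum of the sg elements not yet started plus whatever is
 * left of the element in flight: SNDTR counts data items of the
 * peripheral width, so the count is shifted by the PSIZE encoding
 * (0 = byte, 1 = half-word, 2 = word) to convert items to bytes.
 */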
static size_t stm32_dma_desc_residue(struct stm32_dma_chan *chan,
				     struct stm32_dma_desc *desc,
				     u32 next_sg)
{
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	u32 dma_scr, width, residue, count;
	int i;

	residue = 0;

	for (i = next_sg; i < desc->num_sgs; i++)
		residue += desc->sg_req[i].len;

	if (next_sg != 0) {
		dma_scr = stm32_dma_read(dmadev, STM32_DMA_SCR(chan->id));
		width = STM32_DMA_SCR_PSIZE_GET(dma_scr);
		count = stm32_dma_read(dmadev, STM32_DMA_SNDTR(chan->id));

		residue += count << width;
	}

	return residue;
}

static enum dma_status stm32_dma_tx_status(struct dma_chan *c,
					   dma_cookie_t cookie,
					   struct dma_tx_state *state)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct virt_dma_desc *vdesc;
	enum dma_status status;
	unsigned long flags;
	u32 residue;

	status = dma_cookie_status(c, cookie, state);
	if ((status == DMA_COMPLETE) || (!state))
		return status;

	spin_lock_irqsave(&chan->vchan.lock, flags);
	vdesc = vchan_find_desc(&chan->vchan, cookie);
	if (chan->desc && cookie == chan->desc->vdesc.tx.cookie) {
		residue = stm32_dma_desc_residue(chan, chan->desc,
						 chan->next_sg);
	} else if (vdesc) {
		residue = stm32_dma_desc_residue(chan,
						 to_stm32_dma_desc(vdesc), 0);
	} else {
		residue = 0;
	}

	dma_set_residue(state, residue);

	spin_unlock_irqrestore(&chan->vchan.lock, flags);

	return status;
}

static int stm32_dma_alloc_chan_resources(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	int ret;

	chan->config_init = false;
	ret = clk_prepare_enable(dmadev->clk);
	if (ret < 0) {
		dev_err(chan2dev(chan), "clk_prepare_enable failed: %d\n", ret);
		return ret;
	}

	ret = stm32_dma_disable_chan(chan);
	if (ret < 0)
		clk_disable_unprepare(dmadev->clk);

	return ret;
}

static void stm32_dma_free_chan_resources(struct dma_chan *c)
{
	struct stm32_dma_chan *chan = to_stm32_dma_chan(c);
	struct stm32_dma_device *dmadev = stm32_dma_get_dev(chan);
	unsigned long flags;

	dev_dbg(chan2dev(chan), "Freeing channel %u\n", chan->id);

	if (chan->busy) {
		spin_lock_irqsave(&chan->vchan.lock, flags);
		stm32_dma_stop(chan);
		chan->desc = NULL;
		spin_unlock_irqrestore(&chan->vchan.lock, flags);
	}

	clk_disable_unprepare(dmadev->clk);

	vchan_free_chan_resources(to_virt_chan(c));
}

static void stm32_dma_desc_free(struct virt_dma_desc *vdesc)
{
	kfree(container_of(vdesc, struct stm32_dma_desc, vdesc));
}

static void stm32_dma_set_config(struct stm32_dma_chan *chan,
				 struct stm32_dma_cfg *cfg)
{
	stm32_dma_clear_reg(&chan->chan_reg);

	chan->chan_reg.dma_scr = cfg->stream_config & STM32_DMA_SCR_CFG_MASK;
	chan->chan_reg.dma_scr |= STM32_DMA_SCR_REQ(cfg->request_line);

	/* Enable interrupts */
	chan->chan_reg.dma_scr |= STM32_DMA_SCR_TEIE | STM32_DMA_SCR_TCIE;

	chan->chan_reg.dma_sfcr = cfg->threshold & STM32_DMA_SFCR_FTH_MASK;
}

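/*
 * Translate a DT phandle: the first three cells are the channel id, the
 * request line and the stream configuration bits, with an optional
 * fourth cell selecting the FIFO threshold.
 */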
static struct dma_chan *stm32_dma_of_xlate(struct of_phandle_args *dma_spec,
					   struct of_dma *ofdma)
{
	struct stm32_dma_device *dmadev = ofdma->of_dma_data;
	struct stm32_dma_cfg cfg;
	struct stm32_dma_chan *chan;
	struct dma_chan *c;

	if (dma_spec->args_count < 3)
		return NULL;

	cfg.channel_id = dma_spec->args[0];
	cfg.request_line = dma_spec->args[1];
	cfg.stream_config = dma_spec->args[2];
	cfg.threshold = 0;

	if (cfg.channel_id >= STM32_DMA_MAX_CHANNELS ||
	    cfg.request_line >= STM32_DMA_MAX_REQUEST_ID)
		return NULL;

	if (dma_spec->args_count > 3)
		cfg.threshold = dma_spec->args[3];

	chan = &dmadev->chan[cfg.channel_id];

	c = dma_get_slave_channel(&chan->vchan.chan);
	if (c)
		stm32_dma_set_config(chan, &cfg);

	return c;
}

static const struct of_device_id stm32_dma_of_match[] = {
	{ .compatible = "st,stm32-dma", },
	{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, stm32_dma_of_match);

static int stm32_dma_probe(struct platform_device *pdev)
{
	struct stm32_dma_chan *chan;
	struct stm32_dma_device *dmadev;
	struct dma_device *dd;
	const struct of_device_id *match;
	struct resource *res;
	int i, ret;

	match = of_match_device(stm32_dma_of_match, &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Error: No device match found\n");
		return -ENODEV;
	}

	dmadev = devm_kzalloc(&pdev->dev, sizeof(*dmadev), GFP_KERNEL);
	if (!dmadev)
		return -ENOMEM;

	dd = &dmadev->ddev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dmadev->base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dmadev->base))
		return PTR_ERR(dmadev->base);

	dmadev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(dmadev->clk)) {
		dev_err(&pdev->dev, "Error: Missing controller clock\n");
		return PTR_ERR(dmadev->clk);
	}

	dmadev->mem2mem = of_property_read_bool(pdev->dev.of_node,
						"st,mem2mem");

	dmadev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (!IS_ERR(dmadev->rst)) {
		reset_control_assert(dmadev->rst);
		udelay(2);
		reset_control_deassert(dmadev->rst);
	}

	dma_cap_set(DMA_SLAVE, dd->cap_mask);
	dma_cap_set(DMA_PRIVATE, dd->cap_mask);
	dma_cap_set(DMA_CYCLIC, dd->cap_mask);
	dd->device_alloc_chan_resources = stm32_dma_alloc_chan_resources;
	dd->device_free_chan_resources = stm32_dma_free_chan_resources;
	dd->device_tx_status = stm32_dma_tx_status;
	dd->device_issue_pending = stm32_dma_issue_pending;
	dd->device_prep_slave_sg = stm32_dma_prep_slave_sg;
	dd->device_prep_dma_cyclic = stm32_dma_prep_dma_cyclic;
	dd->device_config = stm32_dma_slave_config;
	dd->device_terminate_all = stm32_dma_terminate_all;
	dd->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
		BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
		BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	dd->directions = BIT(DMA_DEV_TO_MEM) | BIT(DMA_MEM_TO_DEV);
	dd->residue_granularity = DMA_RESIDUE_GRANULARITY_BURST;
	dd->dev = &pdev->dev;
	INIT_LIST_HEAD(&dd->channels);

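	/* Memory-to-memory transfer is only advertised when the DT opts in. */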
	if (dmadev->mem2mem) {
		dma_cap_set(DMA_MEMCPY, dd->cap_mask);
		dd->device_prep_dma_memcpy = stm32_dma_prep_dma_memcpy;
		dd->directions |= BIT(DMA_MEM_TO_MEM);
	}

	for (i = 0; i < STM32_DMA_MAX_CHANNELS; i++) {
		chan = &dmadev->chan[i];
		chan->id = i;
		chan->vchan.desc_free = stm32_dma_desc_free;
		vchan_init(&chan->vchan, dd);
	}

	ret = dma_async_device_register(dd);
	if (ret)
		return ret;

	for (i = 0; i < STM32_DMA_MAX_CHANNELS; i++) {
		chan = &dmadev->chan[i];
		res = platform_get_resource(pdev, IORESOURCE_IRQ, i);
		if (!res) {
			ret = -EINVAL;
			dev_err(&pdev->dev, "No irq resource for chan %d\n", i);
			goto err_unregister;
		}
		chan->irq = res->start;
		ret = devm_request_irq(&pdev->dev, chan->irq,
				       stm32_dma_chan_irq, 0,
				       dev_name(chan2dev(chan)), chan);
		if (ret) {
			dev_err(&pdev->dev,
				"request_irq failed with err %d channel %d\n",
				ret, i);
			goto err_unregister;
		}
	}

	ret = of_dma_controller_register(pdev->dev.of_node,
					 stm32_dma_of_xlate, dmadev);
	if (ret < 0) {
		dev_err(&pdev->dev,
			"STM32 DMA OF registration failed %d\n", ret);
		goto err_unregister;
	}

	platform_set_drvdata(pdev, dmadev);

	dev_info(&pdev->dev, "STM32 DMA driver registered\n");

	return 0;

err_unregister:
	dma_async_device_unregister(dd);

	return ret;
}

static struct platform_driver stm32_dma_driver = {
	.driver = {
		.name = "stm32-dma",
		.of_match_table = stm32_dma_of_match,
	},
};

static int __init stm32_dma_init(void)
{
	return platform_driver_probe(&stm32_dma_driver, stm32_dma_probe);
}
subsys_initcall(stm32_dma_init);