linux/drivers/dma/xilinx/zynqmp_dma.c
   1// SPDX-License-Identifier: GPL-2.0-or-later
   2/*
   3 * DMA driver for Xilinx ZynqMP DMA Engine
   4 *
   5 * Copyright (C) 2016 Xilinx, Inc. All rights reserved.
   6 */
   7
   8#include <linux/bitops.h>
   9#include <linux/dmapool.h>
  10#include <linux/dma/xilinx_dma.h>
  11#include <linux/init.h>
  12#include <linux/interrupt.h>
  13#include <linux/io.h>
  14#include <linux/module.h>
  15#include <linux/of_address.h>
  16#include <linux/of_dma.h>
  17#include <linux/of_irq.h>
  18#include <linux/of_platform.h>
  19#include <linux/slab.h>
  20#include <linux/clk.h>
  21#include <linux/io-64-nonatomic-lo-hi.h>
  22#include <linux/pm_runtime.h>
  23
  24#include "../dmaengine.h"
  25
  26/* Register Offsets */
  27#define ZYNQMP_DMA_ISR                  0x100
  28#define ZYNQMP_DMA_IMR                  0x104
  29#define ZYNQMP_DMA_IER                  0x108
  30#define ZYNQMP_DMA_IDS                  0x10C
  31#define ZYNQMP_DMA_CTRL0                0x110
  32#define ZYNQMP_DMA_CTRL1                0x114
  33#define ZYNQMP_DMA_DATA_ATTR            0x120
  34#define ZYNQMP_DMA_DSCR_ATTR            0x124
  35#define ZYNQMP_DMA_SRC_DSCR_WRD0        0x128
  36#define ZYNQMP_DMA_SRC_DSCR_WRD1        0x12C
  37#define ZYNQMP_DMA_SRC_DSCR_WRD2        0x130
  38#define ZYNQMP_DMA_SRC_DSCR_WRD3        0x134
  39#define ZYNQMP_DMA_DST_DSCR_WRD0        0x138
  40#define ZYNQMP_DMA_DST_DSCR_WRD1        0x13C
  41#define ZYNQMP_DMA_DST_DSCR_WRD2        0x140
  42#define ZYNQMP_DMA_DST_DSCR_WRD3        0x144
  43#define ZYNQMP_DMA_SRC_START_LSB        0x158
  44#define ZYNQMP_DMA_SRC_START_MSB        0x15C
  45#define ZYNQMP_DMA_DST_START_LSB        0x160
  46#define ZYNQMP_DMA_DST_START_MSB        0x164
  47#define ZYNQMP_DMA_TOTAL_BYTE           0x188
  48#define ZYNQMP_DMA_RATE_CTRL            0x18C
  49#define ZYNQMP_DMA_IRQ_SRC_ACCT         0x190
  50#define ZYNQMP_DMA_IRQ_DST_ACCT         0x194
  51#define ZYNQMP_DMA_CTRL2                0x200
  52
  53/* Interrupt registers bit field definitions */
  54#define ZYNQMP_DMA_DONE                 BIT(10)
  55#define ZYNQMP_DMA_AXI_WR_DATA          BIT(9)
  56#define ZYNQMP_DMA_AXI_RD_DATA          BIT(8)
  57#define ZYNQMP_DMA_AXI_RD_DST_DSCR      BIT(7)
  58#define ZYNQMP_DMA_AXI_RD_SRC_DSCR      BIT(6)
  59#define ZYNQMP_DMA_IRQ_DST_ACCT_ERR     BIT(5)
  60#define ZYNQMP_DMA_IRQ_SRC_ACCT_ERR     BIT(4)
  61#define ZYNQMP_DMA_BYTE_CNT_OVRFL       BIT(3)
  62#define ZYNQMP_DMA_DST_DSCR_DONE        BIT(2)
  63#define ZYNQMP_DMA_INV_APB              BIT(0)
  64
  65/* Control 0 register bit field definitions */
  66#define ZYNQMP_DMA_OVR_FETCH            BIT(7)
  67#define ZYNQMP_DMA_POINT_TYPE_SG        BIT(6)
  68#define ZYNQMP_DMA_RATE_CTRL_EN         BIT(3)
  69
  70/* Control 1 register bit field definitions */
  71#define ZYNQMP_DMA_SRC_ISSUE            GENMASK(4, 0)
  72
  73/* Data Attribute register bit field definitions */
  74#define ZYNQMP_DMA_ARBURST              GENMASK(27, 26)
  75#define ZYNQMP_DMA_ARCACHE              GENMASK(25, 22)
  76#define ZYNQMP_DMA_ARCACHE_OFST         22
  77#define ZYNQMP_DMA_ARQOS                GENMASK(21, 18)
  78#define ZYNQMP_DMA_ARQOS_OFST           18
  79#define ZYNQMP_DMA_ARLEN                GENMASK(17, 14)
  80#define ZYNQMP_DMA_ARLEN_OFST           14
  81#define ZYNQMP_DMA_AWBURST              GENMASK(13, 12)
  82#define ZYNQMP_DMA_AWCACHE              GENMASK(11, 8)
  83#define ZYNQMP_DMA_AWCACHE_OFST         8
  84#define ZYNQMP_DMA_AWQOS                GENMASK(7, 4)
  85#define ZYNQMP_DMA_AWQOS_OFST           4
  86#define ZYNQMP_DMA_AWLEN                GENMASK(3, 0)
  87#define ZYNQMP_DMA_AWLEN_OFST           0
  88
  89/* Descriptor Attribute register bit field definitions */
  90#define ZYNQMP_DMA_AXCOHRNT             BIT(8)
  91#define ZYNQMP_DMA_AXCACHE              GENMASK(7, 4)
  92#define ZYNQMP_DMA_AXCACHE_OFST         4
  93#define ZYNQMP_DMA_AXQOS                GENMASK(3, 0)
  94#define ZYNQMP_DMA_AXQOS_OFST           0
  95
  96/* Control register 2 bit field definitions */
  97#define ZYNQMP_DMA_ENABLE               BIT(0)
  98
  99/* Buffer Descriptor definitions */
 100#define ZYNQMP_DMA_DESC_CTRL_STOP       0x10
 101#define ZYNQMP_DMA_DESC_CTRL_COMP_INT   0x4
 102#define ZYNQMP_DMA_DESC_CTRL_SIZE_256   0x2
 103#define ZYNQMP_DMA_DESC_CTRL_COHRNT     0x1
 104
 105/* Interrupt Mask specific definitions */
 106#define ZYNQMP_DMA_INT_ERR      (ZYNQMP_DMA_AXI_RD_DATA | \
 107                                ZYNQMP_DMA_AXI_WR_DATA | \
 108                                ZYNQMP_DMA_AXI_RD_DST_DSCR | \
 109                                ZYNQMP_DMA_AXI_RD_SRC_DSCR | \
 110                                ZYNQMP_DMA_INV_APB)
 111#define ZYNQMP_DMA_INT_OVRFL    (ZYNQMP_DMA_BYTE_CNT_OVRFL | \
 112                                ZYNQMP_DMA_IRQ_SRC_ACCT_ERR | \
 113                                ZYNQMP_DMA_IRQ_DST_ACCT_ERR)
 114#define ZYNQMP_DMA_INT_DONE     (ZYNQMP_DMA_DONE | ZYNQMP_DMA_DST_DSCR_DONE)
 115#define ZYNQMP_DMA_INT_EN_DEFAULT_MASK  (ZYNQMP_DMA_INT_DONE | \
 116                                        ZYNQMP_DMA_INT_ERR | \
 117                                        ZYNQMP_DMA_INT_OVRFL | \
 118                                        ZYNQMP_DMA_DST_DSCR_DONE)
 119
 120/* Max number of descriptors per channel */
 121#define ZYNQMP_DMA_NUM_DESCS    32
 122
 123/* Max transfer size per descriptor */
 124#define ZYNQMP_DMA_MAX_TRANS_LEN        0x40000000
 125
 126/* Max burst lengths */
 127#define ZYNQMP_DMA_MAX_DST_BURST_LEN    32768U
 128#define ZYNQMP_DMA_MAX_SRC_BURST_LEN    32768U
 129
 130/* Reset values for data attributes */
 131#define ZYNQMP_DMA_AXCACHE_VAL          0xF
 132
 133#define ZYNQMP_DMA_SRC_ISSUE_RST_VAL    0x1F
 134
 135#define ZYNQMP_DMA_IDS_DEFAULT_MASK     0xFFF
 136
 137/* Bus width in bits */
 138#define ZYNQMP_DMA_BUS_WIDTH_64         64
 139#define ZYNQMP_DMA_BUS_WIDTH_128        128
 140
 141#define ZDMA_PM_TIMEOUT                 100
 142
  143#define ZYNQMP_DMA_DESC_SIZE(chan)      ((chan)->desc_size)
 144
 145#define to_chan(chan)           container_of(chan, struct zynqmp_dma_chan, \
 146                                             common)
 147#define tx_to_desc(tx)          container_of(tx, struct zynqmp_dma_desc_sw, \
 148                                             async_tx)
 149
 150/**
 151 * struct zynqmp_dma_desc_ll - Hw linked list descriptor
 152 * @addr: Buffer address
 153 * @size: Size of the buffer
 154 * @ctrl: Control word
 155 * @nxtdscraddr: Next descriptor base address
  156 * @rsvd: Reserved field, for hw internal use.
 157 */
 158struct zynqmp_dma_desc_ll {
 159        u64 addr;
 160        u32 size;
 161        u32 ctrl;
 162        u64 nxtdscraddr;
 163        u64 rsvd;
 164};
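
/*
 * Layout note: each hw descriptor above packs into 32 bytes (8 + 4 + 4 + 8 + 8),
 * and source/destination descriptors are always placed as adjacent pairs in
 * the coherent pool, so "sdesc + 1" (or "hw++") in the code below reaches the
 * destination descriptor paired with a given source descriptor.
 */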
 165
 166/**
 167 * struct zynqmp_dma_desc_sw - Per Transaction structure
 168 * @src: Source address for simple mode dma
 169 * @dst: Destination address for simple mode dma
 170 * @len: Transfer length for simple mode dma
 171 * @node: Node in the channel descriptor list
 172 * @tx_list: List head for the current transfer
 173 * @async_tx: Async transaction descriptor
 174 * @src_v: Virtual address of the src descriptor
 175 * @src_p: Physical address of the src descriptor
 176 * @dst_v: Virtual address of the dst descriptor
 177 * @dst_p: Physical address of the dst descriptor
 178 */
 179struct zynqmp_dma_desc_sw {
 180        u64 src;
 181        u64 dst;
 182        u32 len;
 183        struct list_head node;
 184        struct list_head tx_list;
 185        struct dma_async_tx_descriptor async_tx;
 186        struct zynqmp_dma_desc_ll *src_v;
 187        dma_addr_t src_p;
 188        struct zynqmp_dma_desc_ll *dst_v;
 189        dma_addr_t dst_p;
 190};
 191
 192/**
 193 * struct zynqmp_dma_chan - Driver specific DMA channel structure
 194 * @zdev: Driver specific device structure
  195 * @regs: Control register base address
 196 * @lock: Descriptor operation lock
 197 * @pending_list: Descriptors waiting
 198 * @free_list: Descriptors free
 199 * @active_list: Descriptors active
 200 * @sw_desc_pool: SW descriptor pool
 201 * @done_list: Complete descriptors
 202 * @common: DMA common channel
  203 * @desc_pool_v: Virtual address of the descriptor pool
  204 * @desc_pool_p: DMA bus address of the descriptor pool
 205 * @desc_free_cnt: Descriptor available count
 206 * @dev: The dma device
 207 * @irq: Channel IRQ
 208 * @is_dmacoherent: Tells whether dma operations are coherent or not
 209 * @tasklet: Cleanup work after irq
  210 * @idle: Channel idle state
 211 * @desc_size: Size of the low level descriptor
 212 * @err: Channel has errors
 213 * @bus_width: Bus width
 214 * @src_burst_len: Source burst length
 215 * @dst_burst_len: Dest burst length
 216 */
 217struct zynqmp_dma_chan {
 218        struct zynqmp_dma_device *zdev;
 219        void __iomem *regs;
 220        spinlock_t lock;
 221        struct list_head pending_list;
 222        struct list_head free_list;
 223        struct list_head active_list;
 224        struct zynqmp_dma_desc_sw *sw_desc_pool;
 225        struct list_head done_list;
 226        struct dma_chan common;
 227        void *desc_pool_v;
 228        dma_addr_t desc_pool_p;
 229        u32 desc_free_cnt;
 230        struct device *dev;
 231        int irq;
 232        bool is_dmacoherent;
 233        struct tasklet_struct tasklet;
 234        bool idle;
 235        u32 desc_size;
 236        bool err;
 237        u32 bus_width;
 238        u32 src_burst_len;
 239        u32 dst_burst_len;
 240};
 241
 242/**
 243 * struct zynqmp_dma_device - DMA device structure
 244 * @dev: Device Structure
 245 * @common: DMA device structure
 246 * @chan: Driver specific DMA channel
 247 * @clk_main: Pointer to main clock
 248 * @clk_apb: Pointer to apb clock
 249 */
 250struct zynqmp_dma_device {
 251        struct device *dev;
 252        struct dma_device common;
 253        struct zynqmp_dma_chan *chan;
 254        struct clk *clk_main;
 255        struct clk *clk_apb;
 256};
 257
 258static inline void zynqmp_dma_writeq(struct zynqmp_dma_chan *chan, u32 reg,
 259                                     u64 value)
 260{
 261        lo_hi_writeq(value, chan->regs + reg);
 262}
 263
 264/**
 265 * zynqmp_dma_update_desc_to_ctrlr - Updates descriptor to the controller
  266 * @chan: ZynqMP DMA channel pointer
 267 * @desc: Transaction descriptor pointer
 268 */
 269static void zynqmp_dma_update_desc_to_ctrlr(struct zynqmp_dma_chan *chan,
 270                                      struct zynqmp_dma_desc_sw *desc)
 271{
 272        dma_addr_t addr;
 273
 274        addr = desc->src_p;
 275        zynqmp_dma_writeq(chan, ZYNQMP_DMA_SRC_START_LSB, addr);
 276        addr = desc->dst_p;
 277        zynqmp_dma_writeq(chan, ZYNQMP_DMA_DST_START_LSB, addr);
 278}
 279
 280/**
  281 * zynqmp_dma_desc_config_eod - Mark the descriptor as the end descriptor
 282 * @chan: ZynqMP DMA channel pointer
 283 * @desc: Hw descriptor pointer
 284 */
 285static void zynqmp_dma_desc_config_eod(struct zynqmp_dma_chan *chan,
 286                                       void *desc)
 287{
 288        struct zynqmp_dma_desc_ll *hw = (struct zynqmp_dma_desc_ll *)desc;
 289
 290        hw->ctrl |= ZYNQMP_DMA_DESC_CTRL_STOP;
  291        hw++;           /* advance to the paired destination descriptor */
 292        hw->ctrl |= ZYNQMP_DMA_DESC_CTRL_COMP_INT | ZYNQMP_DMA_DESC_CTRL_STOP;
 293}
 294
 295/**
 296 * zynqmp_dma_config_sg_ll_desc - Configure the linked list descriptor
 297 * @chan: ZynqMP DMA channel pointer
 298 * @sdesc: Hw descriptor pointer
 299 * @src: Source buffer address
 300 * @dst: Destination buffer address
 301 * @len: Transfer length
 302 * @prev: Previous hw descriptor pointer
 303 */
 304static void zynqmp_dma_config_sg_ll_desc(struct zynqmp_dma_chan *chan,
 305                                   struct zynqmp_dma_desc_ll *sdesc,
 306                                   dma_addr_t src, dma_addr_t dst, size_t len,
 307                                   struct zynqmp_dma_desc_ll *prev)
 308{
  309        struct zynqmp_dma_desc_ll *ddesc = sdesc + 1; /* paired dst descriptor */
 310
 311        sdesc->size = ddesc->size = len;
 312        sdesc->addr = src;
 313        ddesc->addr = dst;
 314
 315        sdesc->ctrl = ddesc->ctrl = ZYNQMP_DMA_DESC_CTRL_SIZE_256;
 316        if (chan->is_dmacoherent) {
 317                sdesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;
 318                ddesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;
 319        }
 320
 321        if (prev) {
 322                dma_addr_t addr = chan->desc_pool_p +
 323                            ((uintptr_t)sdesc - (uintptr_t)chan->desc_pool_v);
 324                ddesc = prev + 1;
 325                prev->nxtdscraddr = addr;
 326                ddesc->nxtdscraddr = addr + ZYNQMP_DMA_DESC_SIZE(chan);
 327        }
 328}
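
/*
 * Illustrative sketch, not part of the original driver: the bus address of
 * any hw descriptor can be recovered from its byte offset within the coherent
 * pool, which is what the (uintptr_t) arithmetic above relies on when linking
 * the previous src/dst pair to the new one.
 */
static inline dma_addr_t zynqmp_dma_desc_bus_addr(struct zynqmp_dma_chan *chan,
                                                  struct zynqmp_dma_desc_ll *desc)
{
        /* pool bus address plus the descriptor's offset inside the pool */
        return chan->desc_pool_p +
               ((uintptr_t)desc - (uintptr_t)chan->desc_pool_v);
}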
 329
 330/**
 331 * zynqmp_dma_init - Initialize the channel
 332 * @chan: ZynqMP DMA channel pointer
 333 */
 334static void zynqmp_dma_init(struct zynqmp_dma_chan *chan)
 335{
 336        u32 val;
 337
 338        writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
 339        val = readl(chan->regs + ZYNQMP_DMA_ISR);
 340        writel(val, chan->regs + ZYNQMP_DMA_ISR);
 341
 342        if (chan->is_dmacoherent) {
 343                val = ZYNQMP_DMA_AXCOHRNT;
 344                val = (val & ~ZYNQMP_DMA_AXCACHE) |
 345                        (ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_AXCACHE_OFST);
 346                writel(val, chan->regs + ZYNQMP_DMA_DSCR_ATTR);
 347        }
 348
 349        val = readl(chan->regs + ZYNQMP_DMA_DATA_ATTR);
 350        if (chan->is_dmacoherent) {
 351                val = (val & ~ZYNQMP_DMA_ARCACHE) |
 352                        (ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_ARCACHE_OFST);
 353                val = (val & ~ZYNQMP_DMA_AWCACHE) |
 354                        (ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_AWCACHE_OFST);
 355        }
 356        writel(val, chan->regs + ZYNQMP_DMA_DATA_ATTR);
 357
  358        /* Clear the interrupt accounting registers (they clear on read) */
 359        val = readl(chan->regs + ZYNQMP_DMA_IRQ_SRC_ACCT);
 360        val = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
 361
 362        chan->idle = true;
 363}
 364
 365/**
 366 * zynqmp_dma_tx_submit - Submit DMA transaction
 367 * @tx: Async transaction descriptor pointer
 368 *
 369 * Return: cookie value
 370 */
 371static dma_cookie_t zynqmp_dma_tx_submit(struct dma_async_tx_descriptor *tx)
 372{
 373        struct zynqmp_dma_chan *chan = to_chan(tx->chan);
 374        struct zynqmp_dma_desc_sw *desc, *new;
 375        dma_cookie_t cookie;
 376        unsigned long irqflags;
 377
 378        new = tx_to_desc(tx);
 379        spin_lock_irqsave(&chan->lock, irqflags);
 380        cookie = dma_cookie_assign(tx);
 381
 382        if (!list_empty(&chan->pending_list)) {
 383                desc = list_last_entry(&chan->pending_list,
 384                                     struct zynqmp_dma_desc_sw, node);
 385                if (!list_empty(&desc->tx_list))
 386                        desc = list_last_entry(&desc->tx_list,
 387                                               struct zynqmp_dma_desc_sw, node);
 388                desc->src_v->nxtdscraddr = new->src_p;
 389                desc->src_v->ctrl &= ~ZYNQMP_DMA_DESC_CTRL_STOP;
 390                desc->dst_v->nxtdscraddr = new->dst_p;
 391                desc->dst_v->ctrl &= ~ZYNQMP_DMA_DESC_CTRL_STOP;
 392        }
 393
 394        list_add_tail(&new->node, &chan->pending_list);
 395        spin_unlock_irqrestore(&chan->lock, irqflags);
 396
 397        return cookie;
 398}
 399
 400/**
 401 * zynqmp_dma_get_descriptor - Get the sw descriptor from the pool
 402 * @chan: ZynqMP DMA channel pointer
 403 *
 404 * Return: The sw descriptor
 405 */
 406static struct zynqmp_dma_desc_sw *
 407zynqmp_dma_get_descriptor(struct zynqmp_dma_chan *chan)
 408{
 409        struct zynqmp_dma_desc_sw *desc;
 410        unsigned long irqflags;
 411
 412        spin_lock_irqsave(&chan->lock, irqflags);
 413        desc = list_first_entry(&chan->free_list,
 414                                struct zynqmp_dma_desc_sw, node);
 415        list_del(&desc->node);
 416        spin_unlock_irqrestore(&chan->lock, irqflags);
 417
 418        INIT_LIST_HEAD(&desc->tx_list);
 419        /* Clear the src and dst descriptor memory */
 420        memset((void *)desc->src_v, 0, ZYNQMP_DMA_DESC_SIZE(chan));
 421        memset((void *)desc->dst_v, 0, ZYNQMP_DMA_DESC_SIZE(chan));
 422
 423        return desc;
 424}
 425
 426/**
  427 * zynqmp_dma_free_descriptor - Free the sw descriptor
 428 * @chan: ZynqMP DMA channel pointer
 429 * @sdesc: Transaction descriptor pointer
 430 */
 431static void zynqmp_dma_free_descriptor(struct zynqmp_dma_chan *chan,
 432                                 struct zynqmp_dma_desc_sw *sdesc)
 433{
 434        struct zynqmp_dma_desc_sw *child, *next;
 435
 436        chan->desc_free_cnt++;
 437        list_del(&sdesc->node);
 438        list_add_tail(&sdesc->node, &chan->free_list);
 439        list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {
 440                chan->desc_free_cnt++;
 441                list_move_tail(&child->node, &chan->free_list);
 442        }
 443}
 444
 445/**
 446 * zynqmp_dma_free_desc_list - Free descriptors list
 447 * @chan: ZynqMP DMA channel pointer
 448 * @list: List to parse and delete the descriptor
 449 */
 450static void zynqmp_dma_free_desc_list(struct zynqmp_dma_chan *chan,
 451                                      struct list_head *list)
 452{
 453        struct zynqmp_dma_desc_sw *desc, *next;
 454
 455        list_for_each_entry_safe(desc, next, list, node)
 456                zynqmp_dma_free_descriptor(chan, desc);
 457}
 458
 459/**
 460 * zynqmp_dma_alloc_chan_resources - Allocate channel resources
 461 * @dchan: DMA channel
 462 *
 463 * Return: Number of descriptors on success and failure value on error
 464 */
 465static int zynqmp_dma_alloc_chan_resources(struct dma_chan *dchan)
 466{
 467        struct zynqmp_dma_chan *chan = to_chan(dchan);
 468        struct zynqmp_dma_desc_sw *desc;
 469        int i, ret;
 470
 471        ret = pm_runtime_get_sync(chan->dev);
 472        if (ret < 0)
 473                return ret;
 474
 475        chan->sw_desc_pool = kcalloc(ZYNQMP_DMA_NUM_DESCS, sizeof(*desc),
 476                                     GFP_KERNEL);
 477        if (!chan->sw_desc_pool)
 478                return -ENOMEM;
 479
 480        chan->idle = true;
 481        chan->desc_free_cnt = ZYNQMP_DMA_NUM_DESCS;
 482
 483        INIT_LIST_HEAD(&chan->free_list);
 484
 485        for (i = 0; i < ZYNQMP_DMA_NUM_DESCS; i++) {
 486                desc = chan->sw_desc_pool + i;
 487                dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
 488                desc->async_tx.tx_submit = zynqmp_dma_tx_submit;
 489                list_add_tail(&desc->node, &chan->free_list);
 490        }
 491
 492        chan->desc_pool_v = dma_alloc_coherent(chan->dev,
 493                                               (2 * chan->desc_size * ZYNQMP_DMA_NUM_DESCS),
 494                                               &chan->desc_pool_p, GFP_KERNEL);
 495        if (!chan->desc_pool_v)
 496                return -ENOMEM;
 497
 498        for (i = 0; i < ZYNQMP_DMA_NUM_DESCS; i++) {
 499                desc = chan->sw_desc_pool + i;
 500                desc->src_v = (struct zynqmp_dma_desc_ll *) (chan->desc_pool_v +
 501                                        (i * ZYNQMP_DMA_DESC_SIZE(chan) * 2));
 502                desc->dst_v = (struct zynqmp_dma_desc_ll *) (desc->src_v + 1);
 503                desc->src_p = chan->desc_pool_p +
 504                                (i * ZYNQMP_DMA_DESC_SIZE(chan) * 2);
 505                desc->dst_p = desc->src_p + ZYNQMP_DMA_DESC_SIZE(chan);
 506        }
 507
 508        return ZYNQMP_DMA_NUM_DESCS;
 509}
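
/*
 * Pool sizing, worked through: every sw descriptor owns one src/dst pair of
 * hw descriptors, i.e. 2 * ZYNQMP_DMA_DESC_SIZE(chan) = 2 * 32 = 64 bytes,
 * so the coherent pool allocated above comes to 64 * ZYNQMP_DMA_NUM_DESCS =
 * 2048 bytes per channel.
 */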
 510
 511/**
 512 * zynqmp_dma_start - Start DMA channel
 513 * @chan: ZynqMP DMA channel pointer
 514 */
 515static void zynqmp_dma_start(struct zynqmp_dma_chan *chan)
 516{
 517        writel(ZYNQMP_DMA_INT_EN_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IER);
 518        writel(0, chan->regs + ZYNQMP_DMA_TOTAL_BYTE);
 519        chan->idle = false;
 520        writel(ZYNQMP_DMA_ENABLE, chan->regs + ZYNQMP_DMA_CTRL2);
 521}
 522
 523/**
 524 * zynqmp_dma_handle_ovfl_int - Process the overflow interrupt
 525 * @chan: ZynqMP DMA channel pointer
 526 * @status: Interrupt status value
 527 */
 528static void zynqmp_dma_handle_ovfl_int(struct zynqmp_dma_chan *chan, u32 status)
 529{
 530        if (status & ZYNQMP_DMA_BYTE_CNT_OVRFL)
 531                writel(0, chan->regs + ZYNQMP_DMA_TOTAL_BYTE);
 532        if (status & ZYNQMP_DMA_IRQ_DST_ACCT_ERR)
 533                readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
 534        if (status & ZYNQMP_DMA_IRQ_SRC_ACCT_ERR)
 535                readl(chan->regs + ZYNQMP_DMA_IRQ_SRC_ACCT);
 536}
 537
 538static void zynqmp_dma_config(struct zynqmp_dma_chan *chan)
 539{
 540        u32 val, burst_val;
 541
 542        val = readl(chan->regs + ZYNQMP_DMA_CTRL0);
 543        val |= ZYNQMP_DMA_POINT_TYPE_SG;
 544        writel(val, chan->regs + ZYNQMP_DMA_CTRL0);
 545
 546        val = readl(chan->regs + ZYNQMP_DMA_DATA_ATTR);
 547        burst_val = __ilog2_u32(chan->src_burst_len);
 548        val = (val & ~ZYNQMP_DMA_ARLEN) |
 549                ((burst_val << ZYNQMP_DMA_ARLEN_OFST) & ZYNQMP_DMA_ARLEN);
 550        burst_val = __ilog2_u32(chan->dst_burst_len);
 551        val = (val & ~ZYNQMP_DMA_AWLEN) |
 552                ((burst_val << ZYNQMP_DMA_AWLEN_OFST) & ZYNQMP_DMA_AWLEN);
 553        writel(val, chan->regs + ZYNQMP_DMA_DATA_ATTR);
 554}
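
/*
 * Burst encoding, worked example: the driver writes __ilog2_u32() of the
 * configured burst length into the 4-bit ARLEN/AWLEN fields, so the default
 * of 32768 is programmed as 15 (0xF) and a client-requested maxburst of 64
 * would be programmed as 6.
 */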
 555
 556/**
 557 * zynqmp_dma_device_config - Zynqmp dma device configuration
 558 * @dchan: DMA channel
 559 * @config: DMA device config
 560 *
 561 * Return: 0 always
 562 */
 563static int zynqmp_dma_device_config(struct dma_chan *dchan,
 564                                    struct dma_slave_config *config)
 565{
 566        struct zynqmp_dma_chan *chan = to_chan(dchan);
 567
 568        chan->src_burst_len = clamp(config->src_maxburst, 1U,
 569                ZYNQMP_DMA_MAX_SRC_BURST_LEN);
 570        chan->dst_burst_len = clamp(config->dst_maxburst, 1U,
 571                ZYNQMP_DMA_MAX_DST_BURST_LEN);
 572
 573        return 0;
 574}
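
/*
 * Clients reach the callback above through dmaengine_slave_config(); only
 * src_maxburst and dst_maxburst are honoured here, each clamped to the
 * range [1, 32768].
 */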
 575
 576/**
 577 * zynqmp_dma_start_transfer - Initiate the new transfer
 578 * @chan: ZynqMP DMA channel pointer
 579 */
 580static void zynqmp_dma_start_transfer(struct zynqmp_dma_chan *chan)
 581{
 582        struct zynqmp_dma_desc_sw *desc;
 583
 584        if (!chan->idle)
 585                return;
 586
 587        zynqmp_dma_config(chan);
 588
 589        desc = list_first_entry_or_null(&chan->pending_list,
 590                                        struct zynqmp_dma_desc_sw, node);
 591        if (!desc)
 592                return;
 593
 594        list_splice_tail_init(&chan->pending_list, &chan->active_list);
 595        zynqmp_dma_update_desc_to_ctrlr(chan, desc);
 596        zynqmp_dma_start(chan);
 597}
 598
 599
 600/**
 601 * zynqmp_dma_chan_desc_cleanup - Cleanup the completed descriptors
 602 * @chan: ZynqMP DMA channel
 603 */
 604static void zynqmp_dma_chan_desc_cleanup(struct zynqmp_dma_chan *chan)
 605{
 606        struct zynqmp_dma_desc_sw *desc, *next;
 607
 608        list_for_each_entry_safe(desc, next, &chan->done_list, node) {
 609                dma_async_tx_callback callback;
 610                void *callback_param;
 611
 612                callback = desc->async_tx.callback;
 613                callback_param = desc->async_tx.callback_param;
 614                if (callback) {
 615                        spin_unlock(&chan->lock);
 616                        callback(callback_param);
 617                        spin_lock(&chan->lock);
 618                }
 619
  620                /* Return the descriptor to the free list */
 621                zynqmp_dma_free_descriptor(chan, desc);
 622        }
 623}
 624
 625/**
 626 * zynqmp_dma_complete_descriptor - Mark the active descriptor as complete
 627 * @chan: ZynqMP DMA channel pointer
 628 */
 629static void zynqmp_dma_complete_descriptor(struct zynqmp_dma_chan *chan)
 630{
 631        struct zynqmp_dma_desc_sw *desc;
 632
 633        desc = list_first_entry_or_null(&chan->active_list,
 634                                        struct zynqmp_dma_desc_sw, node);
 635        if (!desc)
 636                return;
 637        list_del(&desc->node);
 638        dma_cookie_complete(&desc->async_tx);
 639        list_add_tail(&desc->node, &chan->done_list);
 640}
 641
 642/**
 643 * zynqmp_dma_issue_pending - Issue pending transactions
 644 * @dchan: DMA channel pointer
 645 */
 646static void zynqmp_dma_issue_pending(struct dma_chan *dchan)
 647{
 648        struct zynqmp_dma_chan *chan = to_chan(dchan);
 649        unsigned long irqflags;
 650
 651        spin_lock_irqsave(&chan->lock, irqflags);
 652        zynqmp_dma_start_transfer(chan);
 653        spin_unlock_irqrestore(&chan->lock, irqflags);
 654}
 655
 656/**
 657 * zynqmp_dma_free_descriptors - Free channel descriptors
 658 * @chan: ZynqMP DMA channel pointer
 659 */
 660static void zynqmp_dma_free_descriptors(struct zynqmp_dma_chan *chan)
 661{
 662        zynqmp_dma_free_desc_list(chan, &chan->active_list);
 663        zynqmp_dma_free_desc_list(chan, &chan->pending_list);
 664        zynqmp_dma_free_desc_list(chan, &chan->done_list);
 665}
 666
 667/**
 668 * zynqmp_dma_free_chan_resources - Free channel resources
 669 * @dchan: DMA channel pointer
 670 */
 671static void zynqmp_dma_free_chan_resources(struct dma_chan *dchan)
 672{
 673        struct zynqmp_dma_chan *chan = to_chan(dchan);
 674        unsigned long irqflags;
 675
 676        spin_lock_irqsave(&chan->lock, irqflags);
 677        zynqmp_dma_free_descriptors(chan);
 678        spin_unlock_irqrestore(&chan->lock, irqflags);
 679        dma_free_coherent(chan->dev,
 680                (2 * ZYNQMP_DMA_DESC_SIZE(chan) * ZYNQMP_DMA_NUM_DESCS),
 681                chan->desc_pool_v, chan->desc_pool_p);
 682        kfree(chan->sw_desc_pool);
 683        pm_runtime_mark_last_busy(chan->dev);
 684        pm_runtime_put_autosuspend(chan->dev);
 685}
 686
 687/**
 688 * zynqmp_dma_reset - Reset the channel
 689 * @chan: ZynqMP DMA channel pointer
 690 */
 691static void zynqmp_dma_reset(struct zynqmp_dma_chan *chan)
 692{
 693        writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
 694
 695        zynqmp_dma_complete_descriptor(chan);
 696        zynqmp_dma_chan_desc_cleanup(chan);
 697        zynqmp_dma_free_descriptors(chan);
 698        zynqmp_dma_init(chan);
 699}
 700
 701/**
 702 * zynqmp_dma_irq_handler - ZynqMP DMA Interrupt handler
 703 * @irq: IRQ number
 704 * @data: Pointer to the ZynqMP DMA channel structure
 705 *
 706 * Return: IRQ_HANDLED/IRQ_NONE
 707 */
 708static irqreturn_t zynqmp_dma_irq_handler(int irq, void *data)
 709{
 710        struct zynqmp_dma_chan *chan = (struct zynqmp_dma_chan *)data;
 711        u32 isr, imr, status;
 712        irqreturn_t ret = IRQ_NONE;
 713
 714        isr = readl(chan->regs + ZYNQMP_DMA_ISR);
 715        imr = readl(chan->regs + ZYNQMP_DMA_IMR);
 716        status = isr & ~imr;
 717
 718        writel(isr, chan->regs + ZYNQMP_DMA_ISR);
 719        if (status & ZYNQMP_DMA_INT_DONE) {
 720                tasklet_schedule(&chan->tasklet);
 721                ret = IRQ_HANDLED;
 722        }
 723
 724        if (status & ZYNQMP_DMA_DONE)
 725                chan->idle = true;
 726
 727        if (status & ZYNQMP_DMA_INT_ERR) {
 728                chan->err = true;
 729                tasklet_schedule(&chan->tasklet);
 730                dev_err(chan->dev, "Channel %p has errors\n", chan);
 731                ret = IRQ_HANDLED;
 732        }
 733
 734        if (status & ZYNQMP_DMA_INT_OVRFL) {
 735                zynqmp_dma_handle_ovfl_int(chan, status);
 736                dev_dbg(chan->dev, "Channel %p overflow interrupt\n", chan);
 737                ret = IRQ_HANDLED;
 738        }
 739
 740        return ret;
 741}
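
/*
 * Note on the handler above: only sources left unmasked in IMR are serviced
 * (status = isr & ~imr), and completion handling is deferred to the tasklet
 * so descriptor cleanup and callbacks run outside hard-irq context.
 */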
 742
 743/**
  744 * zynqmp_dma_do_tasklet - Tasklet to handle completed descriptors
 745 * @data: Pointer to the ZynqMP DMA channel structure
 746 */
 747static void zynqmp_dma_do_tasklet(unsigned long data)
 748{
 749        struct zynqmp_dma_chan *chan = (struct zynqmp_dma_chan *)data;
 750        u32 count;
 751        unsigned long irqflags;
 752
 753        spin_lock_irqsave(&chan->lock, irqflags);
 754
 755        if (chan->err) {
 756                zynqmp_dma_reset(chan);
 757                chan->err = false;
 758                goto unlock;
 759        }
 760
  761        count = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT); /* completed dst descriptors */
 762
 763        while (count) {
 764                zynqmp_dma_complete_descriptor(chan);
 765                zynqmp_dma_chan_desc_cleanup(chan);
 766                count--;
 767        }
 768
 769        if (chan->idle)
 770                zynqmp_dma_start_transfer(chan);
 771
 772unlock:
 773        spin_unlock_irqrestore(&chan->lock, irqflags);
 774}
 775
 776/**
 777 * zynqmp_dma_device_terminate_all - Aborts all transfers on a channel
 778 * @dchan: DMA channel pointer
 779 *
 780 * Return: Always '0'
 781 */
 782static int zynqmp_dma_device_terminate_all(struct dma_chan *dchan)
 783{
 784        struct zynqmp_dma_chan *chan = to_chan(dchan);
 785        unsigned long irqflags;
 786
 787        spin_lock_irqsave(&chan->lock, irqflags);
 788        writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
 789        zynqmp_dma_free_descriptors(chan);
 790        spin_unlock_irqrestore(&chan->lock, irqflags);
 791
 792        return 0;
 793}
 794
 795/**
 796 * zynqmp_dma_prep_memcpy - prepare descriptors for memcpy transaction
 797 * @dchan: DMA channel
 798 * @dma_dst: Destination buffer address
 799 * @dma_src: Source buffer address
 800 * @len: Transfer length
 801 * @flags: transfer ack flags
 802 *
 803 * Return: Async transaction descriptor on success and NULL on failure
 804 */
 805static struct dma_async_tx_descriptor *zynqmp_dma_prep_memcpy(
 806                                struct dma_chan *dchan, dma_addr_t dma_dst,
 807                                dma_addr_t dma_src, size_t len, ulong flags)
 808{
 809        struct zynqmp_dma_chan *chan;
 810        struct zynqmp_dma_desc_sw *new, *first = NULL;
 811        void *desc = NULL, *prev = NULL;
 812        size_t copy;
 813        u32 desc_cnt;
 814        unsigned long irqflags;
 815
 816        chan = to_chan(dchan);
 817
 818        desc_cnt = DIV_ROUND_UP(len, ZYNQMP_DMA_MAX_TRANS_LEN);
 819
 820        spin_lock_irqsave(&chan->lock, irqflags);
 821        if (desc_cnt > chan->desc_free_cnt) {
 822                spin_unlock_irqrestore(&chan->lock, irqflags);
 823                dev_dbg(chan->dev, "chan %p descs are not available\n", chan);
 824                return NULL;
 825        }
 826        chan->desc_free_cnt = chan->desc_free_cnt - desc_cnt;
 827        spin_unlock_irqrestore(&chan->lock, irqflags);
 828
 829        do {
 830                /* Allocate and populate the descriptor */
 831                new = zynqmp_dma_get_descriptor(chan);
 832
 833                copy = min_t(size_t, len, ZYNQMP_DMA_MAX_TRANS_LEN);
 834                desc = (struct zynqmp_dma_desc_ll *)new->src_v;
 835                zynqmp_dma_config_sg_ll_desc(chan, desc, dma_src,
 836                                             dma_dst, copy, prev);
 837                prev = desc;
 838                len -= copy;
 839                dma_src += copy;
 840                dma_dst += copy;
 841                if (!first)
 842                        first = new;
 843                else
 844                        list_add_tail(&new->node, &first->tx_list);
 845        } while (len);
 846
 847        zynqmp_dma_desc_config_eod(chan, desc);
 848        async_tx_ack(&first->async_tx);
 849        first->async_tx.flags = flags;
 850        return &first->async_tx;
 851}
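
/*
 * Hedged client-side sketch (hypothetical code, not part of this driver):
 * how a transfer prepared by the routine above is typically driven through
 * the generic dmaengine API. The channel name, buffer handles and the
 * completion handling are assumptions made purely for illustration.
 */
static int __maybe_unused zynqmp_dma_memcpy_example(struct device *dev,
                                                    dma_addr_t dst_dma,
                                                    dma_addr_t src_dma,
                                                    size_t len)
{
        struct dma_chan *chan;
        struct dma_async_tx_descriptor *tx;

        chan = dma_request_chan(dev, "dma0");   /* hypothetical channel name */
        if (IS_ERR(chan))
                return PTR_ERR(chan);

        /* ends up in zynqmp_dma_prep_memcpy() */
        tx = dmaengine_prep_dma_memcpy(chan, dst_dma, src_dma, len,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!tx) {
                dma_release_channel(chan);
                return -ENOMEM;
        }

        dmaengine_submit(tx);                   /* zynqmp_dma_tx_submit() */
        dma_async_issue_pending(chan);          /* zynqmp_dma_issue_pending() */

        /* ... wait for the completion callback, then ... */
        dma_release_channel(chan);

        return 0;
}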
 852
 853/**
 854 * zynqmp_dma_chan_remove - Channel remove function
 855 * @chan: ZynqMP DMA channel pointer
 856 */
 857static void zynqmp_dma_chan_remove(struct zynqmp_dma_chan *chan)
 858{
 859        if (!chan)
 860                return;
 861
 862        if (chan->irq)
 863                devm_free_irq(chan->zdev->dev, chan->irq, chan);
 864        tasklet_kill(&chan->tasklet);
 865        list_del(&chan->common.device_node);
 866}
 867
 868/**
 869 * zynqmp_dma_chan_probe - Per Channel Probing
 870 * @zdev: Driver specific device structure
 871 * @pdev: Pointer to the platform_device structure
 872 *
 873 * Return: '0' on success and failure value on error
 874 */
 875static int zynqmp_dma_chan_probe(struct zynqmp_dma_device *zdev,
 876                           struct platform_device *pdev)
 877{
 878        struct zynqmp_dma_chan *chan;
 879        struct resource *res;
 880        struct device_node *node = pdev->dev.of_node;
 881        int err;
 882
 883        chan = devm_kzalloc(zdev->dev, sizeof(*chan), GFP_KERNEL);
 884        if (!chan)
 885                return -ENOMEM;
 886        chan->dev = zdev->dev;
 887        chan->zdev = zdev;
 888
 889        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
 890        chan->regs = devm_ioremap_resource(&pdev->dev, res);
 891        if (IS_ERR(chan->regs))
 892                return PTR_ERR(chan->regs);
 893
 894        chan->bus_width = ZYNQMP_DMA_BUS_WIDTH_64;
 895        chan->dst_burst_len = ZYNQMP_DMA_MAX_DST_BURST_LEN;
 896        chan->src_burst_len = ZYNQMP_DMA_MAX_SRC_BURST_LEN;
 897        err = of_property_read_u32(node, "xlnx,bus-width", &chan->bus_width);
 898        if (err < 0) {
 899                dev_err(&pdev->dev, "missing xlnx,bus-width property\n");
 900                return err;
 901        }
 902
 903        if (chan->bus_width != ZYNQMP_DMA_BUS_WIDTH_64 &&
 904            chan->bus_width != ZYNQMP_DMA_BUS_WIDTH_128) {
  905                dev_err(zdev->dev, "invalid bus-width value\n");
 906                return -EINVAL;
 907        }
 908
 909        chan->is_dmacoherent =  of_property_read_bool(node, "dma-coherent");
 910        zdev->chan = chan;
 911        tasklet_init(&chan->tasklet, zynqmp_dma_do_tasklet, (ulong)chan);
 912        spin_lock_init(&chan->lock);
 913        INIT_LIST_HEAD(&chan->active_list);
 914        INIT_LIST_HEAD(&chan->pending_list);
 915        INIT_LIST_HEAD(&chan->done_list);
 916        INIT_LIST_HEAD(&chan->free_list);
 917
 918        dma_cookie_init(&chan->common);
 919        chan->common.device = &zdev->common;
 920        list_add_tail(&chan->common.device_node, &zdev->common.channels);
 921
 922        zynqmp_dma_init(chan);
 923        chan->irq = platform_get_irq(pdev, 0);
  924        if (chan->irq < 0)
  925                return chan->irq;
 926        err = devm_request_irq(&pdev->dev, chan->irq, zynqmp_dma_irq_handler, 0,
 927                               "zynqmp-dma", chan);
 928        if (err)
 929                return err;
 930
 931        chan->desc_size = sizeof(struct zynqmp_dma_desc_ll);
 932        chan->idle = true;
 933        return 0;
 934}
 935
 936/**
 937 * of_zynqmp_dma_xlate - Translation function
 938 * @dma_spec: Pointer to DMA specifier as found in the device tree
 939 * @ofdma: Pointer to DMA controller data
 940 *
 941 * Return: DMA channel pointer on success and NULL on error
 942 */
 943static struct dma_chan *of_zynqmp_dma_xlate(struct of_phandle_args *dma_spec,
 944                                            struct of_dma *ofdma)
 945{
 946        struct zynqmp_dma_device *zdev = ofdma->of_dma_data;
 947
 948        return dma_get_slave_channel(&zdev->chan->common);
 949}
 950
 951/**
 952 * zynqmp_dma_suspend - Suspend method for the driver
 953 * @dev:        Address of the device structure
 954 *
 955 * Put the driver into low power mode.
 956 * Return: 0 on success and failure value on error
 957 */
 958static int __maybe_unused zynqmp_dma_suspend(struct device *dev)
 959{
 960        if (!device_may_wakeup(dev))
 961                return pm_runtime_force_suspend(dev);
 962
 963        return 0;
 964}
 965
 966/**
 967 * zynqmp_dma_resume - Resume from suspend
 968 * @dev:        Address of the device structure
 969 *
 970 * Resume operation after suspend.
 971 * Return: 0 on success and failure value on error
 972 */
 973static int __maybe_unused zynqmp_dma_resume(struct device *dev)
 974{
 975        if (!device_may_wakeup(dev))
 976                return pm_runtime_force_resume(dev);
 977
 978        return 0;
 979}
 980
 981/**
 982 * zynqmp_dma_runtime_suspend - Runtime suspend method for the driver
 983 * @dev:        Address of the device structure
 984 *
 985 * Put the driver into low power mode.
 986 * Return: 0 always
 987 */
 988static int __maybe_unused zynqmp_dma_runtime_suspend(struct device *dev)
 989{
 990        struct zynqmp_dma_device *zdev = dev_get_drvdata(dev);
 991
 992        clk_disable_unprepare(zdev->clk_main);
 993        clk_disable_unprepare(zdev->clk_apb);
 994
 995        return 0;
 996}
 997
 998/**
  999 * zynqmp_dma_runtime_resume - Runtime resume method for the driver
 1000 * @dev:        Address of the device structure
 1001 *
 1002 * Bring the device out of low power mode.
 1003 * Return: 0 on success and failure value on error
1004 */
1005static int __maybe_unused zynqmp_dma_runtime_resume(struct device *dev)
1006{
1007        struct zynqmp_dma_device *zdev = dev_get_drvdata(dev);
1008        int err;
1009
1010        err = clk_prepare_enable(zdev->clk_main);
1011        if (err) {
1012                dev_err(dev, "Unable to enable main clock.\n");
1013                return err;
1014        }
1015
1016        err = clk_prepare_enable(zdev->clk_apb);
1017        if (err) {
1018                dev_err(dev, "Unable to enable apb clock.\n");
1019                clk_disable_unprepare(zdev->clk_main);
1020                return err;
1021        }
1022
1023        return 0;
1024}
1025
1026static const struct dev_pm_ops zynqmp_dma_dev_pm_ops = {
1027        SET_SYSTEM_SLEEP_PM_OPS(zynqmp_dma_suspend, zynqmp_dma_resume)
1028        SET_RUNTIME_PM_OPS(zynqmp_dma_runtime_suspend,
1029                           zynqmp_dma_runtime_resume, NULL)
1030};
1031
1032/**
1033 * zynqmp_dma_probe - Driver probe function
1034 * @pdev: Pointer to the platform_device structure
1035 *
1036 * Return: '0' on success and failure value on error
1037 */
1038static int zynqmp_dma_probe(struct platform_device *pdev)
1039{
1040        struct zynqmp_dma_device *zdev;
1041        struct dma_device *p;
1042        int ret;
1043
1044        zdev = devm_kzalloc(&pdev->dev, sizeof(*zdev), GFP_KERNEL);
1045        if (!zdev)
1046                return -ENOMEM;
1047
1048        zdev->dev = &pdev->dev;
1049        INIT_LIST_HEAD(&zdev->common.channels);
1050
1051        dma_set_mask(&pdev->dev, DMA_BIT_MASK(44));
1052        dma_cap_set(DMA_MEMCPY, zdev->common.cap_mask);
1053
1054        p = &zdev->common;
1055        p->device_prep_dma_memcpy = zynqmp_dma_prep_memcpy;
1056        p->device_terminate_all = zynqmp_dma_device_terminate_all;
1057        p->device_issue_pending = zynqmp_dma_issue_pending;
1058        p->device_alloc_chan_resources = zynqmp_dma_alloc_chan_resources;
1059        p->device_free_chan_resources = zynqmp_dma_free_chan_resources;
1060        p->device_tx_status = dma_cookie_status;
1061        p->device_config = zynqmp_dma_device_config;
1062        p->dev = &pdev->dev;
1063
1064        zdev->clk_main = devm_clk_get(&pdev->dev, "clk_main");
1065        if (IS_ERR(zdev->clk_main)) {
1066                dev_err(&pdev->dev, "main clock not found.\n");
1067                return PTR_ERR(zdev->clk_main);
1068        }
1069
1070        zdev->clk_apb = devm_clk_get(&pdev->dev, "clk_apb");
1071        if (IS_ERR(zdev->clk_apb)) {
1072                dev_err(&pdev->dev, "apb clock not found.\n");
1073                return PTR_ERR(zdev->clk_apb);
1074        }
1075
1076        platform_set_drvdata(pdev, zdev);
1077        pm_runtime_set_autosuspend_delay(zdev->dev, ZDMA_PM_TIMEOUT);
1078        pm_runtime_use_autosuspend(zdev->dev);
1079        pm_runtime_enable(zdev->dev);
1080        pm_runtime_get_sync(zdev->dev);
1081        if (!pm_runtime_enabled(zdev->dev)) {
1082                ret = zynqmp_dma_runtime_resume(zdev->dev);
1083                if (ret)
1084                        return ret;
1085        }
1086
1087        ret = zynqmp_dma_chan_probe(zdev, pdev);
1088        if (ret) {
1089                dev_err(&pdev->dev, "Probing channel failed\n");
1090                goto err_disable_pm;
1091        }
1092
1093        p->dst_addr_widths = BIT(zdev->chan->bus_width / 8);
1094        p->src_addr_widths = BIT(zdev->chan->bus_width / 8);
1095
1096        dma_async_device_register(&zdev->common);
1097
1098        ret = of_dma_controller_register(pdev->dev.of_node,
1099                                         of_zynqmp_dma_xlate, zdev);
1100        if (ret) {
1101                dev_err(&pdev->dev, "Unable to register DMA to DT\n");
1102                dma_async_device_unregister(&zdev->common);
1103                goto free_chan_resources;
1104        }
1105
1106        pm_runtime_mark_last_busy(zdev->dev);
1107        pm_runtime_put_sync_autosuspend(zdev->dev);
1108
 1109        dev_info(&pdev->dev, "ZynqMP DMA driver probed successfully\n");
1110
1111        return 0;
1112
1113free_chan_resources:
1114        zynqmp_dma_chan_remove(zdev->chan);
1115err_disable_pm:
1116        if (!pm_runtime_enabled(zdev->dev))
1117                zynqmp_dma_runtime_suspend(zdev->dev);
1118        pm_runtime_disable(zdev->dev);
1119        return ret;
1120}
1121
1122/**
1123 * zynqmp_dma_remove - Driver remove function
1124 * @pdev: Pointer to the platform_device structure
1125 *
1126 * Return: Always '0'
1127 */
1128static int zynqmp_dma_remove(struct platform_device *pdev)
1129{
1130        struct zynqmp_dma_device *zdev = platform_get_drvdata(pdev);
1131
1132        of_dma_controller_free(pdev->dev.of_node);
1133        dma_async_device_unregister(&zdev->common);
1134
1135        zynqmp_dma_chan_remove(zdev->chan);
1136        pm_runtime_disable(zdev->dev);
1137        if (!pm_runtime_enabled(zdev->dev))
1138                zynqmp_dma_runtime_suspend(zdev->dev);
1139
1140        return 0;
1141}
1142
1143static const struct of_device_id zynqmp_dma_of_match[] = {
1144        { .compatible = "xlnx,zynqmp-dma-1.0", },
1145        {}
1146};
1147MODULE_DEVICE_TABLE(of, zynqmp_dma_of_match);
1148
1149static struct platform_driver zynqmp_dma_driver = {
1150        .driver = {
1151                .name = "xilinx-zynqmp-dma",
1152                .of_match_table = zynqmp_dma_of_match,
1153                .pm = &zynqmp_dma_dev_pm_ops,
1154        },
1155        .probe = zynqmp_dma_probe,
1156        .remove = zynqmp_dma_remove,
1157};
1158
1159module_platform_driver(zynqmp_dma_driver);
1160
1161MODULE_LICENSE("GPL");
1162MODULE_AUTHOR("Xilinx, Inc.");
1163MODULE_DESCRIPTION("Xilinx ZynqMP DMA driver");
1164