linux/drivers/dma/sprd-dma.c
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Spreadtrum Communications Inc.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/dma/sprd-dma.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_dma.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

#include "virt-dma.h"

#define SPRD_DMA_CHN_REG_OFFSET         0x1000
#define SPRD_DMA_CHN_REG_LENGTH         0x40
#define SPRD_DMA_MEMCPY_MIN_SIZE        64

/* DMA global registers definition */
#define SPRD_DMA_GLB_PAUSE              0x0
#define SPRD_DMA_GLB_FRAG_WAIT          0x4
#define SPRD_DMA_GLB_REQ_PEND0_EN       0x8
#define SPRD_DMA_GLB_REQ_PEND1_EN       0xc
#define SPRD_DMA_GLB_INT_RAW_STS        0x10
#define SPRD_DMA_GLB_INT_MSK_STS        0x14
#define SPRD_DMA_GLB_REQ_STS            0x18
#define SPRD_DMA_GLB_CHN_EN_STS         0x1c
#define SPRD_DMA_GLB_DEBUG_STS          0x20
#define SPRD_DMA_GLB_ARB_SEL_STS        0x24
#define SPRD_DMA_GLB_REQ_UID(uid)       (0x4 * ((uid) - 1))
#define SPRD_DMA_GLB_REQ_UID_OFFSET     0x2000

/* DMA channel registers definition */
#define SPRD_DMA_CHN_PAUSE              0x0
#define SPRD_DMA_CHN_REQ                0x4
#define SPRD_DMA_CHN_CFG                0x8
#define SPRD_DMA_CHN_INTC               0xc
#define SPRD_DMA_CHN_SRC_ADDR           0x10
#define SPRD_DMA_CHN_DES_ADDR           0x14
#define SPRD_DMA_CHN_FRG_LEN            0x18
#define SPRD_DMA_CHN_BLK_LEN            0x1c
#define SPRD_DMA_CHN_TRSC_LEN           0x20
#define SPRD_DMA_CHN_TRSF_STEP          0x24
#define SPRD_DMA_CHN_WARP_PTR           0x28
#define SPRD_DMA_CHN_WARP_TO            0x2c
#define SPRD_DMA_CHN_LLIST_PTR          0x30
#define SPRD_DMA_CHN_FRAG_STEP          0x34
#define SPRD_DMA_CHN_SRC_BLK_STEP       0x38
#define SPRD_DMA_CHN_DES_BLK_STEP       0x3c

/* SPRD_DMA_CHN_INTC register definition */
#define SPRD_DMA_INT_MASK               GENMASK(4, 0)
#define SPRD_DMA_INT_CLR_OFFSET         24
#define SPRD_DMA_FRAG_INT_EN            BIT(0)
#define SPRD_DMA_BLK_INT_EN             BIT(1)
#define SPRD_DMA_TRANS_INT_EN           BIT(2)
#define SPRD_DMA_LIST_INT_EN            BIT(3)
#define SPRD_DMA_CFG_ERR_INT_EN         BIT(4)

/* SPRD_DMA_CHN_CFG register definition */
#define SPRD_DMA_CHN_EN                 BIT(0)
#define SPRD_DMA_WAIT_BDONE_OFFSET      24
#define SPRD_DMA_DONOT_WAIT_BDONE       1

/* SPRD_DMA_CHN_REQ register definition */
#define SPRD_DMA_REQ_EN                 BIT(0)

/* SPRD_DMA_CHN_PAUSE register definition */
#define SPRD_DMA_PAUSE_EN               BIT(0)
#define SPRD_DMA_PAUSE_STS              BIT(2)
#define SPRD_DMA_PAUSE_CNT              0x2000

/* DMA_CHN_WARP_* register definition */
#define SPRD_DMA_HIGH_ADDR_MASK         GENMASK(31, 28)
#define SPRD_DMA_LOW_ADDR_MASK          GENMASK(31, 0)
#define SPRD_DMA_HIGH_ADDR_OFFSET       4

/* SPRD_DMA_CHN_INTC register interrupt status bits */
#define SPRD_DMA_FRAG_INT_STS           BIT(16)
#define SPRD_DMA_BLK_INT_STS            BIT(17)
#define SPRD_DMA_TRSC_INT_STS           BIT(18)
#define SPRD_DMA_LIST_INT_STS           BIT(19)
#define SPRD_DMA_CFGERR_INT_STS         BIT(20)
#define SPRD_DMA_CHN_INT_STS                                    \
        (SPRD_DMA_FRAG_INT_STS | SPRD_DMA_BLK_INT_STS |         \
         SPRD_DMA_TRSC_INT_STS | SPRD_DMA_LIST_INT_STS |        \
         SPRD_DMA_CFGERR_INT_STS)

/* SPRD_DMA_CHN_FRG_LEN register definition */
#define SPRD_DMA_SRC_DATAWIDTH_OFFSET   30
#define SPRD_DMA_DES_DATAWIDTH_OFFSET   28
#define SPRD_DMA_SWT_MODE_OFFSET        26
#define SPRD_DMA_REQ_MODE_OFFSET        24
#define SPRD_DMA_REQ_MODE_MASK          GENMASK(1, 0)
#define SPRD_DMA_FIX_SEL_OFFSET         21
#define SPRD_DMA_FIX_EN_OFFSET          20
#define SPRD_DMA_LLIST_END_OFFSET       19
#define SPRD_DMA_FRG_LEN_MASK           GENMASK(16, 0)

/* SPRD_DMA_CHN_BLK_LEN register definition */
#define SPRD_DMA_BLK_LEN_MASK           GENMASK(16, 0)

/* SPRD_DMA_CHN_TRSC_LEN register definition */
#define SPRD_DMA_TRSC_LEN_MASK          GENMASK(27, 0)

/* SPRD_DMA_CHN_TRSF_STEP register definition */
#define SPRD_DMA_DEST_TRSF_STEP_OFFSET  16
#define SPRD_DMA_SRC_TRSF_STEP_OFFSET   0
#define SPRD_DMA_TRSF_STEP_MASK         GENMASK(15, 0)

/* define the DMA transfer step type */
#define SPRD_DMA_NONE_STEP              0
#define SPRD_DMA_BYTE_STEP              1
#define SPRD_DMA_SHORT_STEP             2
#define SPRD_DMA_WORD_STEP              4
#define SPRD_DMA_DWORD_STEP             8

#define SPRD_DMA_SOFTWARE_UID           0

/* dma data width values */
enum sprd_dma_datawidth {
        SPRD_DMA_DATAWIDTH_1_BYTE,
        SPRD_DMA_DATAWIDTH_2_BYTES,
        SPRD_DMA_DATAWIDTH_4_BYTES,
        SPRD_DMA_DATAWIDTH_8_BYTES,
};

/* dma channel hardware configuration */
struct sprd_dma_chn_hw {
        u32 pause;
        u32 req;
        u32 cfg;
        u32 intc;
        u32 src_addr;
        u32 des_addr;
        u32 frg_len;
        u32 blk_len;
        u32 trsc_len;
        u32 trsf_step;
        u32 wrap_ptr;
        u32 wrap_to;
        u32 llist_ptr;
        u32 frg_step;
        u32 src_blk_step;
        u32 des_blk_step;
};

/* dma request description */
struct sprd_dma_desc {
        struct virt_dma_desc    vd;
        struct sprd_dma_chn_hw  chn_hw;
};

/* dma channel description */
struct sprd_dma_chn {
        struct virt_dma_chan    vc;
        void __iomem            *chn_base;
        struct dma_slave_config slave_cfg;
        u32                     chn_num;
        u32                     dev_id;
        struct sprd_dma_desc    *cur_desc;
};

/* SPRD dma device */
struct sprd_dma_dev {
        struct dma_device       dma_dev;
        void __iomem            *glb_base;
        struct clk              *clk;
        struct clk              *ashb_clk;
        int                     irq;
        u32                     total_chns;
        struct sprd_dma_chn     channels[];
};

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param);
static struct of_dma_filter_info sprd_dma_info = {
        .filter_fn = sprd_dma_filter_fn,
};
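
/*
 * A minimal sketch of how a memcpy user could request a channel through
 * this filter (hypothetical client code, not part of this driver). The
 * filter parameter is the 1-based channel id that sprd_dma_filter_fn()
 * matches against chn_num + 1:
 *
 *	dma_cap_mask_t mask;
 *	u32 chn_id = 1;
 *	struct dma_chan *chan;
 *
 *	dma_cap_zero(mask);
 *	dma_cap_set(DMA_MEMCPY, mask);
 *	chan = dma_request_channel(mask, sprd_dma_filter_fn, &chn_id);
 */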

static inline struct sprd_dma_chn *to_sprd_dma_chan(struct dma_chan *c)
{
        return container_of(c, struct sprd_dma_chn, vc.chan);
}

static inline struct sprd_dma_dev *to_sprd_dma_dev(struct dma_chan *c)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(c);

        return container_of(schan, struct sprd_dma_dev, channels[c->chan_id]);
}

static inline struct sprd_dma_desc *to_sprd_dma_desc(struct virt_dma_desc *vd)
{
        return container_of(vd, struct sprd_dma_desc, vd);
}

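/*
 * Read-modify-write helper for the per-channel registers: the bits in
 * @mask are cleared and the bits in @val (already shifted into position
 * by the caller) are set.
 */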
static void sprd_dma_chn_update(struct sprd_dma_chn *schan, u32 reg,
                                u32 mask, u32 val)
{
        u32 orig = readl(schan->chn_base + reg);
        u32 tmp;

        tmp = (orig & ~mask) | val;
        writel(tmp, schan->chn_base + reg);
}

static int sprd_dma_enable(struct sprd_dma_dev *sdev)
{
        int ret;

        ret = clk_prepare_enable(sdev->clk);
        if (ret)
                return ret;

        /*
         * The ashb_clk is optional and only used by the AGCP DMA
         * controller, so check whether it is present before enabling it.
         */
        if (!IS_ERR(sdev->ashb_clk))
                ret = clk_prepare_enable(sdev->ashb_clk);

        return ret;
}

static void sprd_dma_disable(struct sprd_dma_dev *sdev)
{
        clk_disable_unprepare(sdev->clk);

        /* Disable the optional ashb_clk of the AGCP DMA, if present. */
        if (!IS_ERR(sdev->ashb_clk))
                clk_disable_unprepare(sdev->ashb_clk);
}

static void sprd_dma_set_uid(struct sprd_dma_chn *schan)
{
        struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
        u32 dev_id = schan->dev_id;

        if (dev_id != SPRD_DMA_SOFTWARE_UID) {
                u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
                                 SPRD_DMA_GLB_REQ_UID(dev_id);

                writel(schan->chn_num + 1, sdev->glb_base + uid_offset);
        }
}

static void sprd_dma_unset_uid(struct sprd_dma_chn *schan)
{
        struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
        u32 dev_id = schan->dev_id;

        if (dev_id != SPRD_DMA_SOFTWARE_UID) {
                u32 uid_offset = SPRD_DMA_GLB_REQ_UID_OFFSET +
                                 SPRD_DMA_GLB_REQ_UID(dev_id);

                writel(0, sdev->glb_base + uid_offset);
        }
}

static void sprd_dma_clear_int(struct sprd_dma_chn *schan)
{
        sprd_dma_chn_update(schan, SPRD_DMA_CHN_INTC,
                            SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET,
                            SPRD_DMA_INT_MASK << SPRD_DMA_INT_CLR_OFFSET);
}

static void sprd_dma_enable_chn(struct sprd_dma_chn *schan)
{
        sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN,
                            SPRD_DMA_CHN_EN);
}

static void sprd_dma_disable_chn(struct sprd_dma_chn *schan)
{
        sprd_dma_chn_update(schan, SPRD_DMA_CHN_CFG, SPRD_DMA_CHN_EN, 0);
}

static void sprd_dma_soft_request(struct sprd_dma_chn *schan)
{
        sprd_dma_chn_update(schan, SPRD_DMA_CHN_REQ, SPRD_DMA_REQ_EN,
                            SPRD_DMA_REQ_EN);
}

static void sprd_dma_pause_resume(struct sprd_dma_chn *schan, bool enable)
{
        struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
        u32 pause, timeout = SPRD_DMA_PAUSE_CNT;

        if (enable) {
                sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
                                    SPRD_DMA_PAUSE_EN, SPRD_DMA_PAUSE_EN);

                do {
                        pause = readl(schan->chn_base + SPRD_DMA_CHN_PAUSE);
                        if (pause & SPRD_DMA_PAUSE_STS)
                                break;

                        cpu_relax();
                } while (--timeout > 0);

                if (!timeout)
                        dev_warn(sdev->dma_dev.dev,
                                 "pause dma controller timeout\n");
        } else {
                sprd_dma_chn_update(schan, SPRD_DMA_CHN_PAUSE,
                                    SPRD_DMA_PAUSE_EN, 0);
        }
}

static void sprd_dma_stop_and_disable(struct sprd_dma_chn *schan)
{
        u32 cfg = readl(schan->chn_base + SPRD_DMA_CHN_CFG);

        if (!(cfg & SPRD_DMA_CHN_EN))
                return;

        sprd_dma_pause_resume(schan, true);
        sprd_dma_disable_chn(schan);
}

static unsigned long sprd_dma_get_dst_addr(struct sprd_dma_chn *schan)
{
        unsigned long addr, addr_high;

        addr = readl(schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
        addr_high = readl(schan->chn_base + SPRD_DMA_CHN_WARP_TO) &
                    SPRD_DMA_HIGH_ADDR_MASK;

        return addr | (addr_high << SPRD_DMA_HIGH_ADDR_OFFSET);
}

static enum sprd_dma_int_type sprd_dma_get_int_type(struct sprd_dma_chn *schan)
{
        struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
        u32 intc_sts = readl(schan->chn_base + SPRD_DMA_CHN_INTC) &
                       SPRD_DMA_CHN_INT_STS;

        switch (intc_sts) {
        case SPRD_DMA_CFGERR_INT_STS:
                return SPRD_DMA_CFGERR_INT;

        case SPRD_DMA_LIST_INT_STS:
                return SPRD_DMA_LIST_INT;

        case SPRD_DMA_TRSC_INT_STS:
                return SPRD_DMA_TRANS_INT;

        case SPRD_DMA_BLK_INT_STS:
                return SPRD_DMA_BLK_INT;

        case SPRD_DMA_FRAG_INT_STS:
                return SPRD_DMA_FRAG_INT;

        default:
                dev_warn(sdev->dma_dev.dev, "incorrect dma interrupt type\n");
                return SPRD_DMA_NO_INT;
        }
}

static enum sprd_dma_req_mode sprd_dma_get_req_type(struct sprd_dma_chn *schan)
{
        u32 frag_reg = readl(schan->chn_base + SPRD_DMA_CHN_FRG_LEN);

        return (frag_reg >> SPRD_DMA_REQ_MODE_OFFSET) & SPRD_DMA_REQ_MODE_MASK;
}

static void sprd_dma_set_chn_config(struct sprd_dma_chn *schan,
                                    struct sprd_dma_desc *sdesc)
{
        struct sprd_dma_chn_hw *cfg = &sdesc->chn_hw;

        writel(cfg->pause, schan->chn_base + SPRD_DMA_CHN_PAUSE);
        writel(cfg->cfg, schan->chn_base + SPRD_DMA_CHN_CFG);
        writel(cfg->intc, schan->chn_base + SPRD_DMA_CHN_INTC);
        writel(cfg->src_addr, schan->chn_base + SPRD_DMA_CHN_SRC_ADDR);
        writel(cfg->des_addr, schan->chn_base + SPRD_DMA_CHN_DES_ADDR);
        writel(cfg->frg_len, schan->chn_base + SPRD_DMA_CHN_FRG_LEN);
        writel(cfg->blk_len, schan->chn_base + SPRD_DMA_CHN_BLK_LEN);
        writel(cfg->trsc_len, schan->chn_base + SPRD_DMA_CHN_TRSC_LEN);
        writel(cfg->trsf_step, schan->chn_base + SPRD_DMA_CHN_TRSF_STEP);
        writel(cfg->wrap_ptr, schan->chn_base + SPRD_DMA_CHN_WARP_PTR);
        writel(cfg->wrap_to, schan->chn_base + SPRD_DMA_CHN_WARP_TO);
        writel(cfg->llist_ptr, schan->chn_base + SPRD_DMA_CHN_LLIST_PTR);
        writel(cfg->frg_step, schan->chn_base + SPRD_DMA_CHN_FRAG_STEP);
        writel(cfg->src_blk_step, schan->chn_base + SPRD_DMA_CHN_SRC_BLK_STEP);
        writel(cfg->des_blk_step, schan->chn_base + SPRD_DMA_CHN_DES_BLK_STEP);
        writel(cfg->req, schan->chn_base + SPRD_DMA_CHN_REQ);
}

static void sprd_dma_start(struct sprd_dma_chn *schan)
{
        struct virt_dma_desc *vd = vchan_next_desc(&schan->vc);

        if (!vd)
                return;

        list_del(&vd->node);
        schan->cur_desc = to_sprd_dma_desc(vd);

        /*
         * Copy the DMA configuration from DMA descriptor to this hardware
         * channel.
         */
        sprd_dma_set_chn_config(schan, schan->cur_desc);
        sprd_dma_set_uid(schan);
        sprd_dma_enable_chn(schan);

        if (schan->dev_id == SPRD_DMA_SOFTWARE_UID)
                sprd_dma_soft_request(schan);
}

static void sprd_dma_stop(struct sprd_dma_chn *schan)
{
        sprd_dma_stop_and_disable(schan);
        sprd_dma_unset_uid(schan);
        sprd_dma_clear_int(schan);
        schan->cur_desc = NULL;
}

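/*
 * A request is treated as done once the reported interrupt type is of at
 * least the granularity of the configured request mode. This relies on
 * the sprd_dma_int_type and sprd_dma_req_mode enums in
 * <linux/dma/sprd-dma.h> being declared in increasing order of
 * granularity (fragment, then block, then transaction, then link-list).
 */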
static bool sprd_dma_check_trans_done(struct sprd_dma_desc *sdesc,
                                      enum sprd_dma_int_type int_type,
                                      enum sprd_dma_req_mode req_mode)
{
        if (int_type == SPRD_DMA_NO_INT)
                return false;

        return int_type >= req_mode + 1;
}

static irqreturn_t dma_irq_handle(int irq, void *dev_id)
{
        struct sprd_dma_dev *sdev = (struct sprd_dma_dev *)dev_id;
        u32 irq_status = readl(sdev->glb_base + SPRD_DMA_GLB_INT_MSK_STS);
        struct sprd_dma_chn *schan;
        struct sprd_dma_desc *sdesc;
        enum sprd_dma_req_mode req_type;
        enum sprd_dma_int_type int_type;
        bool trans_done = false;
        u32 i;

        while (irq_status) {
                i = __ffs(irq_status);
                irq_status &= (irq_status - 1);
                schan = &sdev->channels[i];

                spin_lock(&schan->vc.lock);
                int_type = sprd_dma_get_int_type(schan);
                req_type = sprd_dma_get_req_type(schan);
                sprd_dma_clear_int(schan);

                /*
                 * The channel may already have been terminated, which
                 * clears cur_desc under the lock, so validate the current
                 * descriptor before completing its cookie.
                 */
                sdesc = schan->cur_desc;
                if (!sdesc) {
                        spin_unlock(&schan->vc.lock);
                        continue;
                }

                /* Check if the dma request descriptor is done. */
                trans_done = sprd_dma_check_trans_done(sdesc, int_type,
                                                       req_type);
                if (trans_done) {
                        vchan_cookie_complete(&sdesc->vd);
                        schan->cur_desc = NULL;
                        sprd_dma_start(schan);
                }
                spin_unlock(&schan->vc.lock);
        }

        return IRQ_HANDLED;
}

static int sprd_dma_alloc_chan_resources(struct dma_chan *chan)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        int ret;

        ret = pm_runtime_get_sync(chan->device->dev);
        if (ret < 0)
                return ret;

        schan->dev_id = SPRD_DMA_SOFTWARE_UID;
        return 0;
}

static void sprd_dma_free_chan_resources(struct dma_chan *chan)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->vc.lock, flags);
        sprd_dma_stop(schan);
        spin_unlock_irqrestore(&schan->vc.lock, flags);

        vchan_free_chan_resources(&schan->vc);
        pm_runtime_put(chan->device->dev);
}

static enum dma_status sprd_dma_tx_status(struct dma_chan *chan,
                                          dma_cookie_t cookie,
                                          struct dma_tx_state *txstate)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        struct virt_dma_desc *vd;
        unsigned long flags;
        enum dma_status ret;
        u32 pos;

        ret = dma_cookie_status(chan, cookie, txstate);
        if (ret == DMA_COMPLETE || !txstate)
                return ret;

        spin_lock_irqsave(&schan->vc.lock, flags);
        vd = vchan_find_desc(&schan->vc, cookie);
        if (vd) {
                struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);
                struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;

                if (hw->trsc_len > 0)
                        pos = hw->trsc_len;
                else if (hw->blk_len > 0)
                        pos = hw->blk_len;
                else if (hw->frg_len > 0)
                        pos = hw->frg_len;
                else
                        pos = 0;
        } else if (schan->cur_desc && schan->cur_desc->vd.tx.cookie == cookie) {
                pos = sprd_dma_get_dst_addr(schan);
        } else {
                pos = 0;
        }
        spin_unlock_irqrestore(&schan->vc.lock, flags);

        dma_set_residue(txstate, pos);
        return ret;
}

static void sprd_dma_issue_pending(struct dma_chan *chan)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->vc.lock, flags);
        if (vchan_issue_pending(&schan->vc) && !schan->cur_desc)
                sprd_dma_start(schan);
        spin_unlock_irqrestore(&schan->vc.lock, flags);
}

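/*
 * Map a dmaengine bus width (1/2/4/8 bytes) to the register encoding
 * (0/1/2/3): ffs() returns the 1-based index of the lowest set bit, so
 * ffs(4) - 1 == 2, for example.
 */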
static int sprd_dma_get_datawidth(enum dma_slave_buswidth buswidth)
{
        switch (buswidth) {
        case DMA_SLAVE_BUSWIDTH_1_BYTE:
        case DMA_SLAVE_BUSWIDTH_2_BYTES:
        case DMA_SLAVE_BUSWIDTH_4_BYTES:
        case DMA_SLAVE_BUSWIDTH_8_BYTES:
                return ffs(buswidth) - 1;

        default:
                return -EINVAL;
        }
}

static int sprd_dma_get_step(enum dma_slave_buswidth buswidth)
{
        switch (buswidth) {
        case DMA_SLAVE_BUSWIDTH_1_BYTE:
        case DMA_SLAVE_BUSWIDTH_2_BYTES:
        case DMA_SLAVE_BUSWIDTH_4_BYTES:
        case DMA_SLAVE_BUSWIDTH_8_BYTES:
                return buswidth;

        default:
                return -EINVAL;
        }
}

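/*
 * Beyond the usual DMA_* flags, the @flags word carries
 * Spreadtrum-specific transfer parameters set up by the client: the
 * request mode is taken from the bits at SPRD_DMA_REQ_SHIFT (defined in
 * <linux/dma/sprd-dma.h>) and the interrupt-enable bits from the low
 * SPRD_DMA_INT_MASK bits.
 */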
static int sprd_dma_fill_desc(struct dma_chan *chan,
                              struct sprd_dma_desc *sdesc,
                              dma_addr_t src, dma_addr_t dst, u32 len,
                              enum dma_transfer_direction dir,
                              unsigned long flags,
                              struct dma_slave_config *slave_cfg)
{
        struct sprd_dma_dev *sdev = to_sprd_dma_dev(chan);
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        struct sprd_dma_chn_hw *hw = &sdesc->chn_hw;
        u32 req_mode = (flags >> SPRD_DMA_REQ_SHIFT) & SPRD_DMA_REQ_MODE_MASK;
        u32 int_mode = flags & SPRD_DMA_INT_MASK;
        int src_datawidth, dst_datawidth, src_step, dst_step;
        u32 temp, fix_mode = 0, fix_en = 0;

        if (dir == DMA_MEM_TO_DEV) {
                src_step = sprd_dma_get_step(slave_cfg->src_addr_width);
                if (src_step < 0) {
                        dev_err(sdev->dma_dev.dev, "invalid source step\n");
                        return src_step;
                }
                dst_step = SPRD_DMA_NONE_STEP;
        } else {
                dst_step = sprd_dma_get_step(slave_cfg->dst_addr_width);
                if (dst_step < 0) {
                        dev_err(sdev->dma_dev.dev, "invalid destination step\n");
                        return dst_step;
                }
                src_step = SPRD_DMA_NONE_STEP;
        }

        src_datawidth = sprd_dma_get_datawidth(slave_cfg->src_addr_width);
        if (src_datawidth < 0) {
                dev_err(sdev->dma_dev.dev, "invalid source datawidth\n");
                return src_datawidth;
        }

        dst_datawidth = sprd_dma_get_datawidth(slave_cfg->dst_addr_width);
        if (dst_datawidth < 0) {
                dev_err(sdev->dma_dev.dev, "invalid destination datawidth\n");
                return dst_datawidth;
        }

        if (slave_cfg->slave_id)
                schan->dev_id = slave_cfg->slave_id;

        hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;

        /*
         * wrap_ptr and wrap_to hold the high 4 bits of the source and
         * destination addresses, respectively.
         */
        hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
        hw->wrap_to = (dst >> SPRD_DMA_HIGH_ADDR_OFFSET) & SPRD_DMA_HIGH_ADDR_MASK;
        hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
        hw->des_addr = dst & SPRD_DMA_LOW_ADDR_MASK;

        /*
         * The fix mode can only be enabled when exactly one of the src
         * step and dst step is 0, i.e. one side keeps a fixed address;
         * if both are 0 or both are non-zero, it must stay disabled.
         */
        if ((src_step != 0 && dst_step != 0) || (src_step | dst_step) == 0) {
                fix_en = 0;
        } else {
                fix_en = 1;
                if (src_step)
                        fix_mode = 1;
                else
                        fix_mode = 0;
        }

        hw->intc = int_mode | SPRD_DMA_CFG_ERR_INT_EN;

        temp = src_datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
        temp |= dst_datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
        temp |= req_mode << SPRD_DMA_REQ_MODE_OFFSET;
        temp |= fix_mode << SPRD_DMA_FIX_SEL_OFFSET;
        temp |= fix_en << SPRD_DMA_FIX_EN_OFFSET;
        temp |= slave_cfg->src_maxburst & SPRD_DMA_FRG_LEN_MASK;
        hw->frg_len = temp;

        hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
        hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

        temp = (dst_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
        temp |= (src_step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
        hw->trsf_step = temp;

        hw->frg_step = 0;
        hw->src_blk_step = 0;
        hw->des_blk_step = 0;
        return 0;
}

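/*
 * For memcpy, the data width and address step for both sides are derived
 * from the alignment of @len alone; an unaligned length falls back to
 * byte-wide accesses.
 */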
static struct dma_async_tx_descriptor *
sprd_dma_prep_dma_memcpy(struct dma_chan *chan, dma_addr_t dest, dma_addr_t src,
                         size_t len, unsigned long flags)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        struct sprd_dma_desc *sdesc;
        struct sprd_dma_chn_hw *hw;
        enum sprd_dma_datawidth datawidth;
        u32 step, temp;

        sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
        if (!sdesc)
                return NULL;

        hw = &sdesc->chn_hw;

        hw->cfg = SPRD_DMA_DONOT_WAIT_BDONE << SPRD_DMA_WAIT_BDONE_OFFSET;
        hw->intc = SPRD_DMA_TRANS_INT | SPRD_DMA_CFG_ERR_INT_EN;
        hw->src_addr = src & SPRD_DMA_LOW_ADDR_MASK;
        hw->des_addr = dest & SPRD_DMA_LOW_ADDR_MASK;
        hw->wrap_ptr = (src >> SPRD_DMA_HIGH_ADDR_OFFSET) &
                SPRD_DMA_HIGH_ADDR_MASK;
        hw->wrap_to = (dest >> SPRD_DMA_HIGH_ADDR_OFFSET) &
                SPRD_DMA_HIGH_ADDR_MASK;

        if (IS_ALIGNED(len, 8)) {
                datawidth = SPRD_DMA_DATAWIDTH_8_BYTES;
                step = SPRD_DMA_DWORD_STEP;
        } else if (IS_ALIGNED(len, 4)) {
                datawidth = SPRD_DMA_DATAWIDTH_4_BYTES;
                step = SPRD_DMA_WORD_STEP;
        } else if (IS_ALIGNED(len, 2)) {
                datawidth = SPRD_DMA_DATAWIDTH_2_BYTES;
                step = SPRD_DMA_SHORT_STEP;
        } else {
                datawidth = SPRD_DMA_DATAWIDTH_1_BYTE;
                step = SPRD_DMA_BYTE_STEP;
        }

        temp = datawidth << SPRD_DMA_SRC_DATAWIDTH_OFFSET;
        temp |= datawidth << SPRD_DMA_DES_DATAWIDTH_OFFSET;
        temp |= SPRD_DMA_TRANS_REQ << SPRD_DMA_REQ_MODE_OFFSET;
        temp |= len & SPRD_DMA_FRG_LEN_MASK;
        hw->frg_len = temp;

        hw->blk_len = len & SPRD_DMA_BLK_LEN_MASK;
        hw->trsc_len = len & SPRD_DMA_TRSC_LEN_MASK;

        temp = (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_DEST_TRSF_STEP_OFFSET;
        temp |= (step & SPRD_DMA_TRSF_STEP_MASK) << SPRD_DMA_SRC_TRSF_STEP_OFFSET;
        hw->trsf_step = temp;

        return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

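/*
 * Slave transfers: only a single scatterlist entry per descriptor is
 * supported, so the for_each_sg() walk below effectively captures just
 * the one (last) entry.
 */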
static struct dma_async_tx_descriptor *
sprd_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                       unsigned int sglen, enum dma_transfer_direction dir,
                       unsigned long flags, void *context)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        struct dma_slave_config *slave_cfg = &schan->slave_cfg;
        dma_addr_t src = 0, dst = 0;
        struct sprd_dma_desc *sdesc;
        struct scatterlist *sg;
        u32 len = 0;
        int ret, i;

        /* TODO: only one sg per DMA configuration is supported for now. */
        if (!is_slave_direction(dir) || sglen > 1)
                return NULL;

        sdesc = kzalloc(sizeof(*sdesc), GFP_NOWAIT);
        if (!sdesc)
                return NULL;

        for_each_sg(sgl, sg, sglen, i) {
                len = sg_dma_len(sg);

                if (dir == DMA_MEM_TO_DEV) {
                        src = sg_dma_address(sg);
                        dst = slave_cfg->dst_addr;
                } else {
                        src = slave_cfg->src_addr;
                        dst = sg_dma_address(sg);
                }
        }

        ret = sprd_dma_fill_desc(chan, sdesc, src, dst, len, dir, flags,
                                 slave_cfg);
        if (ret) {
                kfree(sdesc);
                return NULL;
        }

        return vchan_tx_prep(&schan->vc, &sdesc->vd, flags);
}

static int sprd_dma_slave_config(struct dma_chan *chan,
                                 struct dma_slave_config *config)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        struct dma_slave_config *slave_cfg = &schan->slave_cfg;

        if (!is_slave_direction(config->direction))
                return -EINVAL;

        memcpy(slave_cfg, config, sizeof(*config));
        return 0;
}

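/*
 * A sketch of the client-side configuration consumed by
 * sprd_dma_slave_config() above (hypothetical consumer code): the
 * peripheral address and bus widths checked in sprd_dma_fill_desc()
 * all come from this structure.
 *
 *	struct dma_slave_config cfg = {
 *		.direction = DMA_MEM_TO_DEV,
 *		.dst_addr = dev_fifo_phys_addr,
 *		.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
 *		.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES,
 *		.src_maxburst = fragment_len,
 *	};
 *
 *	dmaengine_slave_config(chan, &cfg);
 */
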
static int sprd_dma_pause(struct dma_chan *chan)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->vc.lock, flags);
        sprd_dma_pause_resume(schan, true);
        spin_unlock_irqrestore(&schan->vc.lock, flags);

        return 0;
}

static int sprd_dma_resume(struct dma_chan *chan)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        unsigned long flags;

        spin_lock_irqsave(&schan->vc.lock, flags);
        sprd_dma_pause_resume(schan, false);
        spin_unlock_irqrestore(&schan->vc.lock, flags);

        return 0;
}

static int sprd_dma_terminate_all(struct dma_chan *chan)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        unsigned long flags;
        LIST_HEAD(head);

        spin_lock_irqsave(&schan->vc.lock, flags);
        sprd_dma_stop(schan);

        vchan_get_all_descriptors(&schan->vc, &head);
        spin_unlock_irqrestore(&schan->vc.lock, flags);

        vchan_dma_desc_free_list(&schan->vc, &head);
        return 0;
}

static void sprd_dma_free_desc(struct virt_dma_desc *vd)
{
        struct sprd_dma_desc *sdesc = to_sprd_dma_desc(vd);

        kfree(sdesc);
}

static bool sprd_dma_filter_fn(struct dma_chan *chan, void *param)
{
        struct sprd_dma_chn *schan = to_sprd_dma_chan(chan);
        struct sprd_dma_dev *sdev = to_sprd_dma_dev(&schan->vc.chan);
        u32 req = *(u32 *)param;

        if (req < sdev->total_chns)
                return req == schan->chn_num + 1;
        else
                return false;
}

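/*
 * A sketch of the device tree node this probe expects (illustrative
 * values; see the sprd-dma binding document for the authoritative
 * format). The "ashb_eb" clock and the interrupt are optional, as
 * handled below:
 *
 *	apdma: dma-controller@20100000 {
 *		compatible = "sprd,sc9860-dma";
 *		reg = <0x20100000 0x4000>;
 *		interrupts = <GIC_SPI 50 IRQ_TYPE_LEVEL_HIGH>;
 *		#dma-channels = <32>;
 *		clock-names = "enable";
 *		clocks = <&apahb_gate 5>;
 *	};
 */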
static int sprd_dma_probe(struct platform_device *pdev)
{
        struct device_node *np = pdev->dev.of_node;
        struct sprd_dma_dev *sdev;
        struct sprd_dma_chn *dma_chn;
        struct resource *res;
        u32 chn_count;
        int ret, i;

        ret = device_property_read_u32(&pdev->dev, "#dma-channels", &chn_count);
        if (ret) {
                dev_err(&pdev->dev, "get dma channels count failed\n");
                return ret;
        }

        sdev = devm_kzalloc(&pdev->dev,
                            struct_size(sdev, channels, chn_count),
                            GFP_KERNEL);
        if (!sdev)
                return -ENOMEM;

        sdev->clk = devm_clk_get(&pdev->dev, "enable");
        if (IS_ERR(sdev->clk)) {
                dev_err(&pdev->dev, "get enable clock failed\n");
                return PTR_ERR(sdev->clk);
        }

        /* ashb clock is optional for AGCP DMA */
        sdev->ashb_clk = devm_clk_get(&pdev->dev, "ashb_eb");
        if (IS_ERR(sdev->ashb_clk))
                dev_warn(&pdev->dev, "no optional ashb eb clock\n");

        /*
         * We have three DMA controllers: AP DMA, AON DMA and AGCP DMA. The
         * AGCP DMA controller may leave its interrupt unconnected: not
         * requesting the irq saves power, since DMA interrupts then never
         * wake the system. The interrupts property is therefore optional.
         */
        sdev->irq = platform_get_irq(pdev, 0);
        if (sdev->irq > 0) {
                ret = devm_request_irq(&pdev->dev, sdev->irq, dma_irq_handle,
                                       0, "sprd_dma", (void *)sdev);
                if (ret < 0) {
                        dev_err(&pdev->dev, "request dma irq failed\n");
                        return ret;
                }
        } else {
                dev_warn(&pdev->dev, "no interrupts for the dma controller\n");
        }

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        sdev->glb_base = devm_ioremap_resource(&pdev->dev, res);
        if (IS_ERR(sdev->glb_base))
                return PTR_ERR(sdev->glb_base);

        dma_cap_set(DMA_MEMCPY, sdev->dma_dev.cap_mask);
        sdev->total_chns = chn_count;
        sdev->dma_dev.chancnt = chn_count;
        INIT_LIST_HEAD(&sdev->dma_dev.channels);
        INIT_LIST_HEAD(&sdev->dma_dev.global_node);
        sdev->dma_dev.dev = &pdev->dev;
        sdev->dma_dev.device_alloc_chan_resources = sprd_dma_alloc_chan_resources;
        sdev->dma_dev.device_free_chan_resources = sprd_dma_free_chan_resources;
        sdev->dma_dev.device_tx_status = sprd_dma_tx_status;
        sdev->dma_dev.device_issue_pending = sprd_dma_issue_pending;
        sdev->dma_dev.device_prep_dma_memcpy = sprd_dma_prep_dma_memcpy;
        sdev->dma_dev.device_prep_slave_sg = sprd_dma_prep_slave_sg;
        sdev->dma_dev.device_config = sprd_dma_slave_config;
        sdev->dma_dev.device_pause = sprd_dma_pause;
        sdev->dma_dev.device_resume = sprd_dma_resume;
        sdev->dma_dev.device_terminate_all = sprd_dma_terminate_all;

        for (i = 0; i < chn_count; i++) {
                dma_chn = &sdev->channels[i];
                dma_chn->chn_num = i;
                dma_chn->cur_desc = NULL;
                /* get each channel's registers base address. */
                dma_chn->chn_base = sdev->glb_base + SPRD_DMA_CHN_REG_OFFSET +
                                    SPRD_DMA_CHN_REG_LENGTH * i;

                dma_chn->vc.desc_free = sprd_dma_free_desc;
                vchan_init(&dma_chn->vc, &sdev->dma_dev);
        }

        platform_set_drvdata(pdev, sdev);
        ret = sprd_dma_enable(sdev);
        if (ret)
                return ret;

        pm_runtime_set_active(&pdev->dev);
        pm_runtime_enable(&pdev->dev);

        ret = pm_runtime_get_sync(&pdev->dev);
        if (ret < 0)
                goto err_rpm;

        ret = dma_async_device_register(&sdev->dma_dev);
        if (ret < 0) {
                dev_err(&pdev->dev, "register dma device failed:%d\n", ret);
                goto err_register;
        }

        sprd_dma_info.dma_cap = sdev->dma_dev.cap_mask;
        ret = of_dma_controller_register(np, of_dma_simple_xlate,
                                         &sprd_dma_info);
        if (ret)
                goto err_of_register;

        pm_runtime_put(&pdev->dev);
        return 0;

err_of_register:
        dma_async_device_unregister(&sdev->dma_dev);
err_register:
        pm_runtime_put_noidle(&pdev->dev);
        pm_runtime_disable(&pdev->dev);
err_rpm:
        sprd_dma_disable(sdev);
        return ret;
}

static int sprd_dma_remove(struct platform_device *pdev)
{
        struct sprd_dma_dev *sdev = platform_get_drvdata(pdev);
        struct sprd_dma_chn *c, *cn;
        int ret;

        ret = pm_runtime_get_sync(&pdev->dev);
        if (ret < 0)
                return ret;

        /* explicitly free the irq */
        if (sdev->irq > 0)
                devm_free_irq(&pdev->dev, sdev->irq, sdev);

        list_for_each_entry_safe(c, cn, &sdev->dma_dev.channels,
                                 vc.chan.device_node) {
                list_del(&c->vc.chan.device_node);
                tasklet_kill(&c->vc.task);
        }

        of_dma_controller_free(pdev->dev.of_node);
        dma_async_device_unregister(&sdev->dma_dev);
        sprd_dma_disable(sdev);

        pm_runtime_put_noidle(&pdev->dev);
        pm_runtime_disable(&pdev->dev);
        return 0;
}

static const struct of_device_id sprd_dma_match[] = {
        { .compatible = "sprd,sc9860-dma", },
        {},
};
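
/* Export the match table so the module can autoload via the OF modalias. */
MODULE_DEVICE_TABLE(of, sprd_dma_match);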

static int __maybe_unused sprd_dma_runtime_suspend(struct device *dev)
{
        struct sprd_dma_dev *sdev = dev_get_drvdata(dev);

        sprd_dma_disable(sdev);
        return 0;
}

static int __maybe_unused sprd_dma_runtime_resume(struct device *dev)
{
        struct sprd_dma_dev *sdev = dev_get_drvdata(dev);
        int ret;

        ret = sprd_dma_enable(sdev);
        if (ret)
                dev_err(sdev->dma_dev.dev, "enable dma failed\n");

        return ret;
}

static const struct dev_pm_ops sprd_dma_pm_ops = {
        SET_RUNTIME_PM_OPS(sprd_dma_runtime_suspend,
                           sprd_dma_runtime_resume,
                           NULL)
};

static struct platform_driver sprd_dma_driver = {
        .probe = sprd_dma_probe,
        .remove = sprd_dma_remove,
        .driver = {
                .name = "sprd-dma",
                .of_match_table = sprd_dma_match,
                .pm = &sprd_dma_pm_ops,
        },
};
module_platform_driver(sprd_dma_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("DMA driver for Spreadtrum");
MODULE_AUTHOR("Baolin Wang <baolin.wang@spreadtrum.com>");
MODULE_ALIAS("platform:sprd-dma");