/*
 * DMA driver for the Xilinx ZynqMP DMA engine
 *
 * Copyright (C) Xilinx, Inc. All rights reserved.
 */
#include <linux/bitops.h>
#include <linux/dmapool.h>
#include <linux/dma/xilinx_dma.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_address.h>
#include <linux/of_dma.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/slab.h>
#include <linux/clk.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/pm_runtime.h>

#include "../dmaengine.h"

/* Register offsets */
#define ZYNQMP_DMA_ISR 0x100
#define ZYNQMP_DMA_IMR 0x104
#define ZYNQMP_DMA_IER 0x108
#define ZYNQMP_DMA_IDS 0x10C
#define ZYNQMP_DMA_CTRL0 0x110
#define ZYNQMP_DMA_CTRL1 0x114
#define ZYNQMP_DMA_DATA_ATTR 0x120
#define ZYNQMP_DMA_DSCR_ATTR 0x124
#define ZYNQMP_DMA_SRC_DSCR_WRD0 0x128
#define ZYNQMP_DMA_SRC_DSCR_WRD1 0x12C
#define ZYNQMP_DMA_SRC_DSCR_WRD2 0x130
#define ZYNQMP_DMA_SRC_DSCR_WRD3 0x134
#define ZYNQMP_DMA_DST_DSCR_WRD0 0x138
#define ZYNQMP_DMA_DST_DSCR_WRD1 0x13C
#define ZYNQMP_DMA_DST_DSCR_WRD2 0x140
#define ZYNQMP_DMA_DST_DSCR_WRD3 0x144
#define ZYNQMP_DMA_SRC_START_LSB 0x158
#define ZYNQMP_DMA_SRC_START_MSB 0x15C
#define ZYNQMP_DMA_DST_START_LSB 0x160
#define ZYNQMP_DMA_DST_START_MSB 0x164
#define ZYNQMP_DMA_TOTAL_BYTE 0x188
#define ZYNQMP_DMA_RATE_CTRL 0x18C
#define ZYNQMP_DMA_IRQ_SRC_ACCT 0x190
#define ZYNQMP_DMA_IRQ_DST_ACCT 0x194
#define ZYNQMP_DMA_CTRL2 0x200

/* Interrupt status/enable register bit definitions */
#define ZYNQMP_DMA_DONE BIT(10)
#define ZYNQMP_DMA_AXI_WR_DATA BIT(9)
#define ZYNQMP_DMA_AXI_RD_DATA BIT(8)
#define ZYNQMP_DMA_AXI_RD_DST_DSCR BIT(7)
#define ZYNQMP_DMA_AXI_RD_SRC_DSCR BIT(6)
#define ZYNQMP_DMA_IRQ_DST_ACCT_ERR BIT(5)
#define ZYNQMP_DMA_IRQ_SRC_ACCT_ERR BIT(4)
#define ZYNQMP_DMA_BYTE_CNT_OVRFL BIT(3)
#define ZYNQMP_DMA_DST_DSCR_DONE BIT(2)
#define ZYNQMP_DMA_INV_APB BIT(0)

/* Control 0 register bit definitions */
#define ZYNQMP_DMA_OVR_FETCH BIT(7)
#define ZYNQMP_DMA_POINT_TYPE_SG BIT(6)
#define ZYNQMP_DMA_RATE_CTRL_EN BIT(3)

/* Control 1 register bit definitions */
#define ZYNQMP_DMA_SRC_ISSUE GENMASK(4, 0)

/* Data attribute register bit definitions */
#define ZYNQMP_DMA_ARBURST GENMASK(27, 26)
#define ZYNQMP_DMA_ARCACHE GENMASK(25, 22)
#define ZYNQMP_DMA_ARCACHE_OFST 22
#define ZYNQMP_DMA_ARQOS GENMASK(21, 18)
#define ZYNQMP_DMA_ARQOS_OFST 18
#define ZYNQMP_DMA_ARLEN GENMASK(17, 14)
#define ZYNQMP_DMA_ARLEN_OFST 14
#define ZYNQMP_DMA_AWBURST GENMASK(13, 12)
#define ZYNQMP_DMA_AWCACHE GENMASK(11, 8)
#define ZYNQMP_DMA_AWCACHE_OFST 8
#define ZYNQMP_DMA_AWQOS GENMASK(7, 4)
#define ZYNQMP_DMA_AWQOS_OFST 4
#define ZYNQMP_DMA_AWLEN GENMASK(3, 0)
#define ZYNQMP_DMA_AWLEN_OFST 0

/* Descriptor attribute register bit definitions */
#define ZYNQMP_DMA_AXCOHRNT BIT(8)
#define ZYNQMP_DMA_AXCACHE GENMASK(7, 4)
#define ZYNQMP_DMA_AXCACHE_OFST 4
#define ZYNQMP_DMA_AXQOS GENMASK(3, 0)
#define ZYNQMP_DMA_AXQOS_OFST 0

/* Control 2 register bit definition */
#define ZYNQMP_DMA_ENABLE BIT(0)

/* Buffer descriptor control field flags */
#define ZYNQMP_DMA_DESC_CTRL_STOP 0x10
#define ZYNQMP_DMA_DESC_CTRL_COMP_INT 0x4
#define ZYNQMP_DMA_DESC_CTRL_SIZE_256 0x2
#define ZYNQMP_DMA_DESC_CTRL_COHRNT 0x1

/* Interrupt mask groupings */
#define ZYNQMP_DMA_INT_ERR (ZYNQMP_DMA_AXI_RD_DATA | \
			    ZYNQMP_DMA_AXI_WR_DATA | \
			    ZYNQMP_DMA_AXI_RD_DST_DSCR | \
			    ZYNQMP_DMA_AXI_RD_SRC_DSCR | \
			    ZYNQMP_DMA_INV_APB)
#define ZYNQMP_DMA_INT_OVRFL (ZYNQMP_DMA_BYTE_CNT_OVRFL | \
			      ZYNQMP_DMA_IRQ_SRC_ACCT_ERR | \
			      ZYNQMP_DMA_IRQ_DST_ACCT_ERR)
#define ZYNQMP_DMA_INT_DONE (ZYNQMP_DMA_DONE | ZYNQMP_DMA_DST_DSCR_DONE)
#define ZYNQMP_DMA_INT_EN_DEFAULT_MASK (ZYNQMP_DMA_INT_DONE | \
					ZYNQMP_DMA_INT_ERR | \
					ZYNQMP_DMA_INT_OVRFL | \
					ZYNQMP_DMA_DST_DSCR_DONE)

/* Maximum number of descriptors per channel */
#define ZYNQMP_DMA_NUM_DESCS 32

/* Maximum transfer length of a single hardware descriptor */
#define ZYNQMP_DMA_MAX_TRANS_LEN 0x40000000

/* AXI cache attribute value and reset values for the burst length fields */
#define ZYNQMP_DMA_AXCACHE_VAL 0xF
#define ZYNQMP_DMA_ARLEN_RST_VAL 0xF
#define ZYNQMP_DMA_AWLEN_RST_VAL 0xF

#define ZYNQMP_DMA_SRC_ISSUE_RST_VAL 0x1F

#define ZYNQMP_DMA_IDS_DEFAULT_MASK 0xFFF

/* Bus width in bits */
#define ZYNQMP_DMA_BUS_WIDTH_64 64
#define ZYNQMP_DMA_BUS_WIDTH_128 128

/* Runtime PM autosuspend delay, in milliseconds */
#define ZDMA_PM_TIMEOUT 100

#define ZYNQMP_DMA_DESC_SIZE(chan) (chan->desc_size)

#define to_chan(chan) container_of(chan, struct zynqmp_dma_chan, \
				   common)
#define tx_to_desc(tx) container_of(tx, struct zynqmp_dma_desc_sw, \
				    async_tx)

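/**
 * struct zynqmp_dma_desc_ll - Hardware linked-list descriptor
 * @addr: Buffer address
 * @size: Size of the transfer in bytes
 * @ctrl: Descriptor control word
 * @nxtdscraddr: Address of the next hardware descriptor
 * @rsvd: Reserved, for hardware use
 */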
struct zynqmp_dma_desc_ll {
	u64 addr;
	u32 size;
	u32 ctrl;
	u64 nxtdscraddr;
	u64 rsvd;
};

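/**
 * struct zynqmp_dma_desc_sw - Per-transaction software descriptor
 * @src: Source address of the transfer
 * @dst: Destination address of the transfer
 * @len: Transfer length in bytes
 * @node: Node in the channel descriptor lists
 * @tx_list: List of descriptors chained to this transaction
 * @async_tx: Async transaction descriptor handed to the dmaengine core
 * @src_v: Virtual address of the source hardware descriptor
 * @src_p: Physical (DMA) address of the source hardware descriptor
 * @dst_v: Virtual address of the destination hardware descriptor
 * @dst_p: Physical (DMA) address of the destination hardware descriptor
 */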
struct zynqmp_dma_desc_sw {
	u64 src;
	u64 dst;
	u32 len;
	struct list_head node;
	struct list_head tx_list;
	struct dma_async_tx_descriptor async_tx;
	struct zynqmp_dma_desc_ll *src_v;
	dma_addr_t src_p;
	struct zynqmp_dma_desc_ll *dst_v;
	dma_addr_t dst_p;
};

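/**
 * struct zynqmp_dma_chan - ZynqMP DMA channel
 * @zdev: Pointer to the parent DMA device structure
 * @regs: Base address of the channel registers
 * @lock: Protects the descriptor lists and channel state
 * @pending_list: Descriptors submitted but not yet issued to hardware
 * @free_list: Descriptors available for new transactions
 * @active_list: Descriptors currently owned by the hardware
 * @sw_desc_pool: Backing storage for the software descriptors
 * @done_list: Completed descriptors awaiting cleanup
 * @common: DMA channel exposed to the dmaengine core
 * @desc_pool_v: Virtual address of the hardware descriptor pool
 * @desc_pool_p: Physical (DMA) address of the hardware descriptor pool
 * @desc_free_cnt: Number of free software descriptors
 * @dev: Device backing this channel
 * @irq: Channel IRQ number
 * @is_dmacoherent: True if the channel is cache coherent
 * @tasklet: Completion handling tasklet
 * @idle: True when the hardware is not running a transfer
 * @desc_size: Size of one hardware descriptor
 * @err: True when a channel error has been reported
 * @bus_width: AXI bus width in bits
 * @src_burst_len: Source (read) burst length
 * @dst_burst_len: Destination (write) burst length
 */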
struct zynqmp_dma_chan {
	struct zynqmp_dma_device *zdev;
	void __iomem *regs;
	spinlock_t lock;
	struct list_head pending_list;
	struct list_head free_list;
	struct list_head active_list;
	struct zynqmp_dma_desc_sw *sw_desc_pool;
	struct list_head done_list;
	struct dma_chan common;
	void *desc_pool_v;
	dma_addr_t desc_pool_p;
	u32 desc_free_cnt;
	struct device *dev;
	int irq;
	bool is_dmacoherent;
	struct tasklet_struct tasklet;
	bool idle;
	u32 desc_size;
	bool err;
	u32 bus_width;
	u32 src_burst_len;
	u32 dst_burst_len;
};

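/**
 * struct zynqmp_dma_device - ZynqMP DMA device
 * @dev: Platform device
 * @common: DMA device exposed to the dmaengine core
 * @chan: The single channel provided by this controller
 * @clk_main: Main (AXI) clock
 * @clk_apb: APB (register interface) clock
 */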
struct zynqmp_dma_device {
	struct device *dev;
	struct dma_device common;
	struct zynqmp_dma_chan *chan;
	struct clk *clk_main;
	struct clk *clk_apb;
};

/* Write a 64-bit value to the given channel register (lower word first) */
static inline void zynqmp_dma_writeq(struct zynqmp_dma_chan *chan, u32 reg,
				     u64 value)
{
	lo_hi_writeq(value, chan->regs + reg);
}

/* Program the first descriptor addresses into the channel start registers */
static void zynqmp_dma_update_desc_to_ctrlr(struct zynqmp_dma_chan *chan,
					    struct zynqmp_dma_desc_sw *desc)
{
	dma_addr_t addr;

	addr = desc->src_p;
	zynqmp_dma_writeq(chan, ZYNQMP_DMA_SRC_START_LSB, addr);
	addr = desc->dst_p;
	zynqmp_dma_writeq(chan, ZYNQMP_DMA_DST_START_LSB, addr);
}

/* Mark the last descriptor pair: stop on both, completion interrupt on the destination */
static void zynqmp_dma_desc_config_eod(struct zynqmp_dma_chan *chan,
				       void *desc)
{
	struct zynqmp_dma_desc_ll *hw = (struct zynqmp_dma_desc_ll *)desc;

	hw->ctrl |= ZYNQMP_DMA_DESC_CTRL_STOP;
	hw++;
	hw->ctrl |= ZYNQMP_DMA_DESC_CTRL_COMP_INT | ZYNQMP_DMA_DESC_CTRL_STOP;
}

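/**
 * zynqmp_dma_config_sg_ll_desc - Configure a source/destination descriptor pair
 * @chan: ZynqMP DMA channel pointer
 * @sdesc: Source hardware descriptor (the destination descriptor follows it)
 * @src: Source buffer address
 * @dst: Destination buffer address
 * @len: Transfer length in bytes
 * @prev: Previous source hardware descriptor, or NULL for the first pair
 */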
static void zynqmp_dma_config_sg_ll_desc(struct zynqmp_dma_chan *chan,
					 struct zynqmp_dma_desc_ll *sdesc,
					 dma_addr_t src, dma_addr_t dst, size_t len,
					 struct zynqmp_dma_desc_ll *prev)
{
	struct zynqmp_dma_desc_ll *ddesc = sdesc + 1;

	sdesc->size = ddesc->size = len;
	sdesc->addr = src;
	ddesc->addr = dst;

	sdesc->ctrl = ddesc->ctrl = ZYNQMP_DMA_DESC_CTRL_SIZE_256;
	if (chan->is_dmacoherent) {
		sdesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;
		ddesc->ctrl |= ZYNQMP_DMA_DESC_CTRL_COHRNT;
	}

	if (prev) {
		dma_addr_t addr = chan->desc_pool_p +
			((uintptr_t)sdesc - (uintptr_t)chan->desc_pool_v);
		ddesc = prev + 1;
		prev->nxtdscraddr = addr;
		ddesc->nxtdscraddr = addr + ZYNQMP_DMA_DESC_SIZE(chan);
	}
}

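/**
 * zynqmp_dma_init - Initialise the DMA channel hardware
 * @chan: ZynqMP DMA channel pointer
 *
 * Masks and clears all interrupts, programs the descriptor and data AXI
 * attributes, and marks the channel idle.
 */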
static void zynqmp_dma_init(struct zynqmp_dma_chan *chan)
{
	u32 val;

	writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
	val = readl(chan->regs + ZYNQMP_DMA_ISR);
	writel(val, chan->regs + ZYNQMP_DMA_ISR);

	if (chan->is_dmacoherent) {
		val = ZYNQMP_DMA_AXCOHRNT;
		val = (val & ~ZYNQMP_DMA_AXCACHE) |
			(ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_AXCACHE_OFST);
		writel(val, chan->regs + ZYNQMP_DMA_DSCR_ATTR);
	}

	val = readl(chan->regs + ZYNQMP_DMA_DATA_ATTR);
	if (chan->is_dmacoherent) {
		val = (val & ~ZYNQMP_DMA_ARCACHE) |
			(ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_ARCACHE_OFST);
		val = (val & ~ZYNQMP_DMA_AWCACHE) |
			(ZYNQMP_DMA_AXCACHE_VAL << ZYNQMP_DMA_AWCACHE_OFST);
	}
	writel(val, chan->regs + ZYNQMP_DMA_DATA_ATTR);

	/* Clear the interrupt accounting registers by reading them */
	val = readl(chan->regs + ZYNQMP_DMA_IRQ_SRC_ACCT);
	val = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);

	chan->idle = true;
}

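/**
 * zynqmp_dma_tx_submit - Submit a transaction to the pending queue
 * @tx: Async transaction descriptor
 *
 * Assigns a cookie and, if other transactions are already pending, links the
 * new hardware descriptors onto the tail of the previous ones.
 *
 * Return: cookie value
 */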
static dma_cookie_t zynqmp_dma_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct zynqmp_dma_chan *chan = to_chan(tx->chan);
	struct zynqmp_dma_desc_sw *desc, *new;
	dma_cookie_t cookie;
	unsigned long irqflags;

	new = tx_to_desc(tx);
	spin_lock_irqsave(&chan->lock, irqflags);
	cookie = dma_cookie_assign(tx);

	if (!list_empty(&chan->pending_list)) {
		desc = list_last_entry(&chan->pending_list,
				       struct zynqmp_dma_desc_sw, node);
		if (!list_empty(&desc->tx_list))
			desc = list_last_entry(&desc->tx_list,
					       struct zynqmp_dma_desc_sw, node);
		desc->src_v->nxtdscraddr = new->src_p;
		desc->src_v->ctrl &= ~ZYNQMP_DMA_DESC_CTRL_STOP;
		desc->dst_v->nxtdscraddr = new->dst_p;
		desc->dst_v->ctrl &= ~ZYNQMP_DMA_DESC_CTRL_STOP;
	}

	list_add_tail(&new->node, &chan->pending_list);
	spin_unlock_irqrestore(&chan->lock, irqflags);

	return cookie;
}

/**
 * zynqmp_dma_get_descriptor - Get a free software descriptor
 * @chan: ZynqMP DMA channel pointer
 *
 * Return: the next free software descriptor, with its hardware descriptors
 * zeroed.
 */
static struct zynqmp_dma_desc_sw *
zynqmp_dma_get_descriptor(struct zynqmp_dma_chan *chan)
{
	struct zynqmp_dma_desc_sw *desc;
	unsigned long irqflags;

	spin_lock_irqsave(&chan->lock, irqflags);
	desc = list_first_entry(&chan->free_list,
				struct zynqmp_dma_desc_sw, node);
	list_del(&desc->node);
	spin_unlock_irqrestore(&chan->lock, irqflags);

	INIT_LIST_HEAD(&desc->tx_list);

	memset((void *)desc->src_v, 0, ZYNQMP_DMA_DESC_SIZE(chan));
	memset((void *)desc->dst_v, 0, ZYNQMP_DMA_DESC_SIZE(chan));

	return desc;
}

/**
 * zynqmp_dma_free_descriptor - Return a descriptor to the free list
 * @chan: ZynqMP DMA channel pointer
 * @sdesc: Transaction descriptor to free, including any chained descriptors
 */
static void zynqmp_dma_free_descriptor(struct zynqmp_dma_chan *chan,
				       struct zynqmp_dma_desc_sw *sdesc)
{
	struct zynqmp_dma_desc_sw *child, *next;

	chan->desc_free_cnt++;
	list_add_tail(&sdesc->node, &chan->free_list);
	list_for_each_entry_safe(child, next, &sdesc->tx_list, node) {
		chan->desc_free_cnt++;
		list_move_tail(&child->node, &chan->free_list);
	}
}

/**
 * zynqmp_dma_free_desc_list - Free all descriptors on a list
 * @chan: ZynqMP DMA channel pointer
 * @list: List of descriptors to free
 */
static void zynqmp_dma_free_desc_list(struct zynqmp_dma_chan *chan,
				      struct list_head *list)
{
	struct zynqmp_dma_desc_sw *desc, *next;

	list_for_each_entry_safe(desc, next, list, node)
		zynqmp_dma_free_descriptor(chan, desc);
	INIT_LIST_HEAD(list);
}

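/**
 * zynqmp_dma_alloc_chan_resources - Allocate channel resources
 * @dchan: DMA channel
 *
 * Allocates the software descriptor pool and the coherent pool that backs the
 * hardware descriptors.
 *
 * Return: number of descriptors on success, or a negative errno on failure
 */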
static int zynqmp_dma_alloc_chan_resources(struct dma_chan *dchan)
{
	struct zynqmp_dma_chan *chan = to_chan(dchan);
	struct zynqmp_dma_desc_sw *desc;
	int i, ret;

	ret = pm_runtime_get_sync(chan->dev);
	if (ret < 0)
		return ret;

	chan->sw_desc_pool = kcalloc(ZYNQMP_DMA_NUM_DESCS, sizeof(*desc),
				     GFP_KERNEL);
	if (!chan->sw_desc_pool)
		return -ENOMEM;

	chan->idle = true;
	chan->desc_free_cnt = ZYNQMP_DMA_NUM_DESCS;

	INIT_LIST_HEAD(&chan->free_list);

	for (i = 0; i < ZYNQMP_DMA_NUM_DESCS; i++) {
		desc = chan->sw_desc_pool + i;
		dma_async_tx_descriptor_init(&desc->async_tx, &chan->common);
		desc->async_tx.tx_submit = zynqmp_dma_tx_submit;
		list_add_tail(&desc->node, &chan->free_list);
	}

	chan->desc_pool_v = dma_alloc_coherent(chan->dev,
					       (2 * chan->desc_size * ZYNQMP_DMA_NUM_DESCS),
					       &chan->desc_pool_p, GFP_KERNEL);
	if (!chan->desc_pool_v)
		return -ENOMEM;

	for (i = 0; i < ZYNQMP_DMA_NUM_DESCS; i++) {
		desc = chan->sw_desc_pool + i;
		desc->src_v = (struct zynqmp_dma_desc_ll *) (chan->desc_pool_v +
					(i * ZYNQMP_DMA_DESC_SIZE(chan) * 2));
		desc->dst_v = (struct zynqmp_dma_desc_ll *) (desc->src_v + 1);
		desc->src_p = chan->desc_pool_p +
				(i * ZYNQMP_DMA_DESC_SIZE(chan) * 2);
		desc->dst_p = desc->src_p + ZYNQMP_DMA_DESC_SIZE(chan);
	}

	return ZYNQMP_DMA_NUM_DESCS;
}

/**
 * zynqmp_dma_start - Enable the channel and start the transfer
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_start(struct zynqmp_dma_chan *chan)
{
	writel(ZYNQMP_DMA_INT_EN_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IER);
	writel(0, chan->regs + ZYNQMP_DMA_TOTAL_BYTE);
	chan->idle = false;
	writel(ZYNQMP_DMA_ENABLE, chan->regs + ZYNQMP_DMA_CTRL2);
}

/**
 * zynqmp_dma_handle_ovfl_int - Handle overflow interrupts
 * @chan: ZynqMP DMA channel pointer
 * @status: Interrupt status bits
 *
 * Resets the counter registers for the overflow conditions reported in
 * @status.
 */
static void zynqmp_dma_handle_ovfl_int(struct zynqmp_dma_chan *chan, u32 status)
{
	if (status & ZYNQMP_DMA_BYTE_CNT_OVRFL)
		writel(0, chan->regs + ZYNQMP_DMA_TOTAL_BYTE);
	if (status & ZYNQMP_DMA_IRQ_DST_ACCT_ERR)
		readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);
	if (status & ZYNQMP_DMA_IRQ_SRC_ACCT_ERR)
		readl(chan->regs + ZYNQMP_DMA_IRQ_SRC_ACCT);
}

/* Program scatter-gather mode and the configured burst lengths */
static void zynqmp_dma_config(struct zynqmp_dma_chan *chan)
{
	u32 val;

	val = readl(chan->regs + ZYNQMP_DMA_CTRL0);
	val |= ZYNQMP_DMA_POINT_TYPE_SG;
	writel(val, chan->regs + ZYNQMP_DMA_CTRL0);

	val = readl(chan->regs + ZYNQMP_DMA_DATA_ATTR);
	val = (val & ~ZYNQMP_DMA_ARLEN) |
		(chan->src_burst_len << ZYNQMP_DMA_ARLEN_OFST);
	val = (val & ~ZYNQMP_DMA_AWLEN) |
		(chan->dst_burst_len << ZYNQMP_DMA_AWLEN_OFST);
	writel(val, chan->regs + ZYNQMP_DMA_DATA_ATTR);
}

/**
 * zynqmp_dma_device_config - Configure the DMA channel
 * @dchan: DMA channel
 * @config: Channel configuration; only the burst lengths are used
 *
 * Return: 0 always
 */
static int zynqmp_dma_device_config(struct dma_chan *dchan,
				    struct dma_slave_config *config)
{
	struct zynqmp_dma_chan *chan = to_chan(dchan);

	chan->src_burst_len = config->src_maxburst;
	chan->dst_burst_len = config->dst_maxburst;

	return 0;
}

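/**
 * zynqmp_dma_start_transfer - Kick off a pending transfer
 * @chan: ZynqMP DMA channel pointer
 *
 * Must be called with the channel lock held. Does nothing if the hardware is
 * busy or there is nothing pending.
 */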
static void zynqmp_dma_start_transfer(struct zynqmp_dma_chan *chan)
{
	struct zynqmp_dma_desc_sw *desc;

	if (!chan->idle)
		return;

	zynqmp_dma_config(chan);

	desc = list_first_entry_or_null(&chan->pending_list,
					struct zynqmp_dma_desc_sw, node);
	if (!desc)
		return;

	list_splice_tail_init(&chan->pending_list, &chan->active_list);
	zynqmp_dma_update_desc_to_ctrlr(chan, desc);
	zynqmp_dma_start(chan);
}

/**
 * zynqmp_dma_chan_desc_cleanup - Run completion callbacks and free descriptors
 * @chan: ZynqMP DMA channel pointer
 *
 * Called with the channel lock held; the lock is dropped around the callback.
 */
static void zynqmp_dma_chan_desc_cleanup(struct zynqmp_dma_chan *chan)
{
	struct zynqmp_dma_desc_sw *desc, *next;

	list_for_each_entry_safe(desc, next, &chan->done_list, node) {
		dma_async_tx_callback callback;
		void *callback_param;

		list_del(&desc->node);

		callback = desc->async_tx.callback;
		callback_param = desc->async_tx.callback_param;
		if (callback) {
			spin_unlock(&chan->lock);
			callback(callback_param);
			spin_lock(&chan->lock);
		}

		zynqmp_dma_free_descriptor(chan, desc);
	}
}

/**
 * zynqmp_dma_complete_descriptor - Mark the active descriptor as complete
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_complete_descriptor(struct zynqmp_dma_chan *chan)
{
	struct zynqmp_dma_desc_sw *desc;

	desc = list_first_entry_or_null(&chan->active_list,
					struct zynqmp_dma_desc_sw, node);
	if (!desc)
		return;
	list_del(&desc->node);
	dma_cookie_complete(&desc->async_tx);
	list_add_tail(&desc->node, &chan->done_list);
}

/**
 * zynqmp_dma_issue_pending - Issue pending transactions to the hardware
 * @dchan: DMA channel
 */
static void zynqmp_dma_issue_pending(struct dma_chan *dchan)
{
	struct zynqmp_dma_chan *chan = to_chan(dchan);
	unsigned long irqflags;

	spin_lock_irqsave(&chan->lock, irqflags);
	zynqmp_dma_start_transfer(chan);
	spin_unlock_irqrestore(&chan->lock, irqflags);
}

/**
 * zynqmp_dma_free_descriptors - Free the active, pending and done lists
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_free_descriptors(struct zynqmp_dma_chan *chan)
{
	zynqmp_dma_free_desc_list(chan, &chan->active_list);
	zynqmp_dma_free_desc_list(chan, &chan->pending_list);
	zynqmp_dma_free_desc_list(chan, &chan->done_list);
}

/**
 * zynqmp_dma_free_chan_resources - Free channel resources
 * @dchan: DMA channel
 */
static void zynqmp_dma_free_chan_resources(struct dma_chan *dchan)
{
	struct zynqmp_dma_chan *chan = to_chan(dchan);
	unsigned long irqflags;

	spin_lock_irqsave(&chan->lock, irqflags);
	zynqmp_dma_free_descriptors(chan);
	spin_unlock_irqrestore(&chan->lock, irqflags);
	dma_free_coherent(chan->dev,
			  (2 * ZYNQMP_DMA_DESC_SIZE(chan) * ZYNQMP_DMA_NUM_DESCS),
			  chan->desc_pool_v, chan->desc_pool_p);
	kfree(chan->sw_desc_pool);
	pm_runtime_mark_last_busy(chan->dev);
	pm_runtime_put_autosuspend(chan->dev);
}

/**
 * zynqmp_dma_reset - Reset the channel after an error
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_reset(struct zynqmp_dma_chan *chan)
{
	writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);

	zynqmp_dma_complete_descriptor(chan);
	zynqmp_dma_chan_desc_cleanup(chan);
	zynqmp_dma_free_descriptors(chan);
	zynqmp_dma_init(chan);
}

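/**
 * zynqmp_dma_irq_handler - ZynqMP DMA interrupt handler
 * @irq: IRQ number
 * @data: Pointer to the ZynqMP DMA channel structure
 *
 * Return: IRQ_HANDLED if an interrupt was serviced, IRQ_NONE otherwise
 */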
static irqreturn_t zynqmp_dma_irq_handler(int irq, void *data)
{
	struct zynqmp_dma_chan *chan = (struct zynqmp_dma_chan *)data;
	u32 isr, imr, status;
	irqreturn_t ret = IRQ_NONE;

	isr = readl(chan->regs + ZYNQMP_DMA_ISR);
	imr = readl(chan->regs + ZYNQMP_DMA_IMR);
	status = isr & ~imr;

	writel(isr, chan->regs + ZYNQMP_DMA_ISR);
	if (status & ZYNQMP_DMA_INT_DONE) {
		tasklet_schedule(&chan->tasklet);
		ret = IRQ_HANDLED;
	}

	if (status & ZYNQMP_DMA_DONE)
		chan->idle = true;

	if (status & ZYNQMP_DMA_INT_ERR) {
		chan->err = true;
		tasklet_schedule(&chan->tasklet);
		dev_err(chan->dev, "Channel %p has errors\n", chan);
		ret = IRQ_HANDLED;
	}

	if (status & ZYNQMP_DMA_INT_OVRFL) {
		zynqmp_dma_handle_ovfl_int(chan, status);
		dev_dbg(chan->dev, "Channel %p overflow interrupt\n", chan);
		ret = IRQ_HANDLED;
	}

	return ret;
}

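/**
 * zynqmp_dma_do_tasklet - Completion tasklet
 * @data: Pointer to the ZynqMP DMA channel structure
 *
 * Resets the channel on error; otherwise completes as many descriptors as the
 * destination interrupt accounting register reports and restarts the channel
 * if more work is pending.
 */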
static void zynqmp_dma_do_tasklet(unsigned long data)
{
	struct zynqmp_dma_chan *chan = (struct zynqmp_dma_chan *)data;
	u32 count;
	unsigned long irqflags;

	spin_lock_irqsave(&chan->lock, irqflags);

	if (chan->err) {
		zynqmp_dma_reset(chan);
		chan->err = false;
		goto unlock;
	}

	count = readl(chan->regs + ZYNQMP_DMA_IRQ_DST_ACCT);

	while (count) {
		zynqmp_dma_complete_descriptor(chan);
		zynqmp_dma_chan_desc_cleanup(chan);
		count--;
	}

	if (chan->idle)
		zynqmp_dma_start_transfer(chan);

unlock:
	spin_unlock_irqrestore(&chan->lock, irqflags);
}

/**
 * zynqmp_dma_device_terminate_all - Abort all transactions on a channel
 * @dchan: DMA channel
 *
 * Return: 0 always
 */
static int zynqmp_dma_device_terminate_all(struct dma_chan *dchan)
{
	struct zynqmp_dma_chan *chan = to_chan(dchan);
	unsigned long irqflags;

	spin_lock_irqsave(&chan->lock, irqflags);
	writel(ZYNQMP_DMA_IDS_DEFAULT_MASK, chan->regs + ZYNQMP_DMA_IDS);
	zynqmp_dma_free_descriptors(chan);
	spin_unlock_irqrestore(&chan->lock, irqflags);

	return 0;
}

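/**
 * zynqmp_dma_prep_memcpy - Prepare a memcpy transaction
 * @dchan: DMA channel
 * @dma_dst: Destination DMA address
 * @dma_src: Source DMA address
 * @len: Transfer length in bytes
 * @flags: Transfer ack flags
 *
 * Splits the copy into ZYNQMP_DMA_MAX_TRANS_LEN sized chunks, one descriptor
 * pair per chunk.
 *
 * Return: Async transaction descriptor, or NULL if no descriptors are free
 */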
static struct dma_async_tx_descriptor *zynqmp_dma_prep_memcpy(
				struct dma_chan *dchan, dma_addr_t dma_dst,
				dma_addr_t dma_src, size_t len, ulong flags)
{
	struct zynqmp_dma_chan *chan;
	struct zynqmp_dma_desc_sw *new, *first = NULL;
	void *desc = NULL, *prev = NULL;
	size_t copy;
	u32 desc_cnt;
	unsigned long irqflags;

	chan = to_chan(dchan);

	desc_cnt = DIV_ROUND_UP(len, ZYNQMP_DMA_MAX_TRANS_LEN);

	spin_lock_irqsave(&chan->lock, irqflags);
	if (desc_cnt > chan->desc_free_cnt) {
		spin_unlock_irqrestore(&chan->lock, irqflags);
		dev_dbg(chan->dev, "chan %p descs are not available\n", chan);
		return NULL;
	}
	chan->desc_free_cnt = chan->desc_free_cnt - desc_cnt;
	spin_unlock_irqrestore(&chan->lock, irqflags);

	do {
		/* Allocate a descriptor and fill in the chunk addresses and length */
		new = zynqmp_dma_get_descriptor(chan);

		copy = min_t(size_t, len, ZYNQMP_DMA_MAX_TRANS_LEN);
		desc = (struct zynqmp_dma_desc_ll *)new->src_v;
		zynqmp_dma_config_sg_ll_desc(chan, desc, dma_src,
					     dma_dst, copy, prev);
		prev = desc;
		len -= copy;
		dma_src += copy;
		dma_dst += copy;
		if (!first)
			first = new;
		else
			list_add_tail(&new->node, &first->tx_list);
	} while (len);

	zynqmp_dma_desc_config_eod(chan, desc);
	async_tx_ack(&first->async_tx);
	first->async_tx.flags = flags;
	return &first->async_tx;
}

/**
 * zynqmp_dma_chan_remove - Channel remove function
 * @chan: ZynqMP DMA channel pointer
 */
static void zynqmp_dma_chan_remove(struct zynqmp_dma_chan *chan)
{
	if (!chan)
		return;

	if (chan->irq)
		devm_free_irq(chan->zdev->dev, chan->irq, chan);
	tasklet_kill(&chan->tasklet);
	list_del(&chan->common.device_node);
}

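/**
 * zynqmp_dma_chan_probe - Per-channel probe
 * @zdev: ZynqMP DMA device structure
 * @pdev: Platform device
 *
 * Maps the registers, parses the channel properties, initialises the hardware
 * and requests the channel interrupt.
 *
 * Return: 0 on success, or a negative errno on failure
 */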
static int zynqmp_dma_chan_probe(struct zynqmp_dma_device *zdev,
				 struct platform_device *pdev)
{
	struct zynqmp_dma_chan *chan;
	struct resource *res;
	struct device_node *node = pdev->dev.of_node;
	int err;

	chan = devm_kzalloc(zdev->dev, sizeof(*chan), GFP_KERNEL);
	if (!chan)
		return -ENOMEM;
	chan->dev = zdev->dev;
	chan->zdev = zdev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	chan->regs = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(chan->regs))
		return PTR_ERR(chan->regs);

	chan->bus_width = ZYNQMP_DMA_BUS_WIDTH_64;
	chan->dst_burst_len = ZYNQMP_DMA_AWLEN_RST_VAL;
	chan->src_burst_len = ZYNQMP_DMA_ARLEN_RST_VAL;
	err = of_property_read_u32(node, "xlnx,bus-width", &chan->bus_width);
	if (err < 0) {
		dev_err(&pdev->dev, "missing xlnx,bus-width property\n");
		return err;
	}

	if (chan->bus_width != ZYNQMP_DMA_BUS_WIDTH_64 &&
	    chan->bus_width != ZYNQMP_DMA_BUS_WIDTH_128) {
		dev_err(zdev->dev, "invalid bus-width value");
		return -EINVAL;
	}

	chan->is_dmacoherent = of_property_read_bool(node, "dma-coherent");
	zdev->chan = chan;
	tasklet_init(&chan->tasklet, zynqmp_dma_do_tasklet, (ulong)chan);
	spin_lock_init(&chan->lock);
	INIT_LIST_HEAD(&chan->active_list);
	INIT_LIST_HEAD(&chan->pending_list);
	INIT_LIST_HEAD(&chan->done_list);
	INIT_LIST_HEAD(&chan->free_list);

	dma_cookie_init(&chan->common);
	chan->common.device = &zdev->common;
	list_add_tail(&chan->common.device_node, &zdev->common.channels);

	zynqmp_dma_init(chan);
	chan->irq = platform_get_irq(pdev, 0);
	if (chan->irq < 0)
		return -ENXIO;
	err = devm_request_irq(&pdev->dev, chan->irq, zynqmp_dma_irq_handler, 0,
			       "zynqmp-dma", chan);
	if (err)
		return err;

	chan->desc_size = sizeof(struct zynqmp_dma_desc_ll);
	chan->idle = true;
	return 0;
}

/**
 * of_zynqmp_dma_xlate - Translate a DT phandle to the DMA channel
 * @dma_spec: DT DMA specifier
 * @ofdma: DT DMA controller data
 *
 * Return: the single DMA channel provided by this controller
 */
static struct dma_chan *of_zynqmp_dma_xlate(struct of_phandle_args *dma_spec,
					    struct of_dma *ofdma)
{
	struct zynqmp_dma_device *zdev = ofdma->of_dma_data;

	return dma_get_slave_channel(&zdev->chan->common);
}

/**
 * zynqmp_dma_suspend - System suspend callback
 * @dev: Device pointer
 *
 * Return: 0, or an error from pm_runtime_force_suspend()
 */
static int __maybe_unused zynqmp_dma_suspend(struct device *dev)
{
	if (!device_may_wakeup(dev))
		return pm_runtime_force_suspend(dev);

	return 0;
}

/**
 * zynqmp_dma_resume - System resume callback
 * @dev: Device pointer
 *
 * Return: 0, or an error from pm_runtime_force_resume()
 */
static int __maybe_unused zynqmp_dma_resume(struct device *dev)
{
	if (!device_may_wakeup(dev))
		return pm_runtime_force_resume(dev);

	return 0;
}

/**
 * zynqmp_dma_runtime_suspend - Runtime suspend callback
 * @dev: Device pointer
 *
 * Return: 0 always
 */
static int __maybe_unused zynqmp_dma_runtime_suspend(struct device *dev)
{
	struct zynqmp_dma_device *zdev = dev_get_drvdata(dev);

	clk_disable_unprepare(zdev->clk_main);
	clk_disable_unprepare(zdev->clk_apb);

	return 0;
}

/**
 * zynqmp_dma_runtime_resume - Runtime resume callback
 * @dev: Device pointer
 *
 * Return: 0 on success, or a clock enable error
 */
static int __maybe_unused zynqmp_dma_runtime_resume(struct device *dev)
{
	struct zynqmp_dma_device *zdev = dev_get_drvdata(dev);
	int err;

	err = clk_prepare_enable(zdev->clk_main);
	if (err) {
		dev_err(dev, "Unable to enable main clock.\n");
		return err;
	}

	err = clk_prepare_enable(zdev->clk_apb);
	if (err) {
		dev_err(dev, "Unable to enable apb clock.\n");
		clk_disable_unprepare(zdev->clk_main);
		return err;
	}

	return 0;
}

static const struct dev_pm_ops zynqmp_dma_dev_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(zynqmp_dma_suspend, zynqmp_dma_resume)
	SET_RUNTIME_PM_OPS(zynqmp_dma_runtime_suspend,
			   zynqmp_dma_runtime_resume, NULL)
};

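/**
 * zynqmp_dma_probe - Driver probe function
 * @pdev: Platform device
 *
 * Return: 0 on success, or a negative errno on failure
 */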
static int zynqmp_dma_probe(struct platform_device *pdev)
{
	struct zynqmp_dma_device *zdev;
	struct dma_device *p;
	int ret;

	zdev = devm_kzalloc(&pdev->dev, sizeof(*zdev), GFP_KERNEL);
	if (!zdev)
		return -ENOMEM;

	zdev->dev = &pdev->dev;
	INIT_LIST_HEAD(&zdev->common.channels);

	dma_set_mask(&pdev->dev, DMA_BIT_MASK(44));
	dma_cap_set(DMA_MEMCPY, zdev->common.cap_mask);

	p = &zdev->common;
	p->device_prep_dma_memcpy = zynqmp_dma_prep_memcpy;
	p->device_terminate_all = zynqmp_dma_device_terminate_all;
	p->device_issue_pending = zynqmp_dma_issue_pending;
	p->device_alloc_chan_resources = zynqmp_dma_alloc_chan_resources;
	p->device_free_chan_resources = zynqmp_dma_free_chan_resources;
	p->device_tx_status = dma_cookie_status;
	p->device_config = zynqmp_dma_device_config;
	p->dev = &pdev->dev;

	zdev->clk_main = devm_clk_get(&pdev->dev, "clk_main");
	if (IS_ERR(zdev->clk_main)) {
		dev_err(&pdev->dev, "main clock not found.\n");
		return PTR_ERR(zdev->clk_main);
	}

	zdev->clk_apb = devm_clk_get(&pdev->dev, "clk_apb");
	if (IS_ERR(zdev->clk_apb)) {
		dev_err(&pdev->dev, "apb clock not found.\n");
		return PTR_ERR(zdev->clk_apb);
	}

	platform_set_drvdata(pdev, zdev);
	pm_runtime_set_autosuspend_delay(zdev->dev, ZDMA_PM_TIMEOUT);
	pm_runtime_use_autosuspend(zdev->dev);
	pm_runtime_enable(zdev->dev);
	pm_runtime_get_sync(zdev->dev);
	if (!pm_runtime_enabled(zdev->dev)) {
		ret = zynqmp_dma_runtime_resume(zdev->dev);
		if (ret)
			return ret;
	}

	ret = zynqmp_dma_chan_probe(zdev, pdev);
	if (ret) {
		dev_err(&pdev->dev, "Probing channel failed\n");
		goto err_disable_pm;
	}

	p->dst_addr_widths = BIT(zdev->chan->bus_width / 8);
	p->src_addr_widths = BIT(zdev->chan->bus_width / 8);

	ret = dma_async_device_register(&zdev->common);
	if (ret) {
		dev_err(&pdev->dev, "failed to register the dma device\n");
		goto free_chan_resources;
	}

	ret = of_dma_controller_register(pdev->dev.of_node,
					 of_zynqmp_dma_xlate, zdev);
	if (ret) {
		dev_err(&pdev->dev, "Unable to register DMA to DT\n");
		dma_async_device_unregister(&zdev->common);
		goto free_chan_resources;
	}

	pm_runtime_mark_last_busy(zdev->dev);
	pm_runtime_put_sync_autosuspend(zdev->dev);

	dev_info(&pdev->dev, "ZynqMP DMA driver Probe success\n");

	return 0;

free_chan_resources:
	zynqmp_dma_chan_remove(zdev->chan);
err_disable_pm:
	if (!pm_runtime_enabled(zdev->dev))
		zynqmp_dma_runtime_suspend(zdev->dev);
	pm_runtime_disable(zdev->dev);
	return ret;
}

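/**
 * zynqmp_dma_remove - Driver remove function
 * @pdev: Platform device
 *
 * Return: 0 always
 */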
static int zynqmp_dma_remove(struct platform_device *pdev)
{
	struct zynqmp_dma_device *zdev = platform_get_drvdata(pdev);

	of_dma_controller_free(pdev->dev.of_node);
	dma_async_device_unregister(&zdev->common);

	zynqmp_dma_chan_remove(zdev->chan);
	pm_runtime_disable(zdev->dev);
	if (!pm_runtime_enabled(zdev->dev))
		zynqmp_dma_runtime_suspend(zdev->dev);

	return 0;
}

static const struct of_device_id zynqmp_dma_of_match[] = {
	{ .compatible = "xlnx,zynqmp-dma-1.0", },
	{}
};
MODULE_DEVICE_TABLE(of, zynqmp_dma_of_match);

static struct platform_driver zynqmp_dma_driver = {
	.driver = {
		.name = "xilinx-zynqmp-dma",
		.of_match_table = zynqmp_dma_of_match,
		.pm = &zynqmp_dma_dev_pm_ops,
	},
	.probe = zynqmp_dma_probe,
	.remove = zynqmp_dma_remove,
};

module_platform_driver(zynqmp_dma_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Xilinx, Inc.");
MODULE_DESCRIPTION("Xilinx ZynqMP DMA driver");