/*
 * EDMA3 support for DaVinci
 */
20#include <linux/kernel.h>
21#include <linux/init.h>
22#include <linux/module.h>
23#include <linux/interrupt.h>
24#include <linux/platform_device.h>
25#include <linux/io.h>
26#include <linux/slab.h>
27
28#include <mach/edma.h>
29
30
/* Offsets matching "struct edmacc_param" — one PaRAM slot is 0x20 bytes. */
#define PARM_OPT		0x00
#define PARM_SRC		0x04
#define PARM_A_B_CNT		0x08
#define PARM_DST		0x0c
#define PARM_SRC_DST_BIDX	0x10
#define PARM_LINK_BCNTRLD	0x14
#define PARM_SRC_DST_CIDX	0x18
#define PARM_CCNT		0x1c

#define PARM_SIZE		0x20

/* Offsets for EDMA CC global channel registers and their shadows */
#define SH_ER		0x00	/* 64 bits */
#define SH_ECR		0x08	/* 64 bits */
#define SH_ESR		0x10	/* 64 bits */
#define SH_CER		0x18	/* 64 bits */
#define SH_EER		0x20	/* 64 bits */
#define SH_EECR		0x28	/* 64 bits */
#define SH_EESR		0x30	/* 64 bits */
#define SH_SER		0x38	/* 64 bits */
#define SH_SECR		0x40	/* 64 bits */
#define SH_IER		0x50	/* 64 bits */
#define SH_IECR		0x58	/* 64 bits */
#define SH_IESR		0x60	/* 64 bits */
#define SH_IPR		0x68	/* 64 bits */
#define SH_ICR		0x70	/* 64 bits */
#define SH_IEVAL	0x78
#define SH_QER		0x80
#define SH_QEER		0x84
#define SH_QEECR	0x88
#define SH_QEESR	0x8c
#define SH_QSER		0x90
#define SH_QSECR	0x94
#define SH_SIZE		0x200
65
66
/* Offsets for EDMA CC global registers */
#define EDMA_REV	0x0000
#define EDMA_CCCFG	0x0004
#define EDMA_QCHMAP	0x0200	/* 8 registers */
#define EDMA_DMAQNUM	0x0240	/* 8 registers (4 on OMAP-L1xx) */
#define EDMA_QDMAQNUM	0x0260
#define EDMA_QUETCMAP	0x0280
#define EDMA_QUEPRI	0x0284
#define EDMA_EMR	0x0300	/* 64 bits */
#define EDMA_EMCR	0x0308	/* 64 bits */
#define EDMA_QEMR	0x0310
#define EDMA_QEMCR	0x0314
#define EDMA_CCERR	0x0318
#define EDMA_CCERRCLR	0x031c
#define EDMA_EEVAL	0x0320
#define EDMA_DRAE	0x0340	/* 4 x 64 bits*/
#define EDMA_QRAE	0x0380	/* 4 registers */
#define EDMA_QUEEVTENTRY	0x0400	/* 2 x 16 registers */
#define EDMA_QSTAT	0x0600	/* 2 registers */
#define EDMA_QWMTHRA	0x0620
#define EDMA_QWMTHRB	0x0624
#define EDMA_CCSTAT	0x0640

#define EDMA_M		0x1000	/* global channel registers */
#define EDMA_ECR	0x1008
#define EDMA_ECRH	0x100C
#define EDMA_SHADOW0	0x2000	/* 4 shadow regions */
#define EDMA_PARM	0x4000	/* PaRAM (and RAM debug) */

/* Byte offset of PaRAM slot @param_no from the controller base. */
#define PARM_OFFSET(param_no)	(EDMA_PARM + ((param_no) << 5))

#define EDMA_DCHMAP	0x0100  /* DMA Channel Map */
#define CHMAP_EXIST	BIT(24)

#define EDMA_MAX_DMACH           64
#define EDMA_MAX_PARAMENTRY     512
102
103
104
105static void __iomem *edmacc_regs_base[EDMA_MAX_CC];
106
107static inline unsigned int edma_read(unsigned ctlr, int offset)
108{
109 return (unsigned int)__raw_readl(edmacc_regs_base[ctlr] + offset);
110}
111
112static inline void edma_write(unsigned ctlr, int offset, int val)
113{
114 __raw_writel(val, edmacc_regs_base[ctlr] + offset);
115}
/* Read-modify-write: keep bits in @and, then set bits in @or. */
static inline void edma_modify(unsigned ctlr, int offset, unsigned and,
		unsigned or)
{
	unsigned cur = edma_read(ctlr, offset);

	edma_write(ctlr, offset, (cur & and) | or);
}

/* Clear register bits: keep only bits present in @and. */
static inline void edma_and(unsigned ctlr, int offset, unsigned and)
{
	unsigned cur = edma_read(ctlr, offset);

	edma_write(ctlr, offset, cur & and);
}

/* Set register bits present in @or. */
static inline void edma_or(unsigned ctlr, int offset, unsigned or)
{
	unsigned cur = edma_read(ctlr, offset);

	edma_write(ctlr, offset, cur | or);
}
/* Accessors for register files laid out as arrays of 32-bit words. */
static inline unsigned int edma_read_array(unsigned ctlr, int offset, int i)
{
	return edma_read(ctlr, offset + 4 * i);
}

static inline void edma_write_array(unsigned ctlr, int offset, int i,
		unsigned val)
{
	edma_write(ctlr, offset + 4 * i, val);
}

static inline void edma_modify_array(unsigned ctlr, int offset, int i,
		unsigned and, unsigned or)
{
	edma_modify(ctlr, offset + 4 * i, and, or);
}

static inline void edma_or_array(unsigned ctlr, int offset, int i, unsigned or)
{
	edma_or(ctlr, offset + 4 * i, or);
}

/* Two-dimensional variants: register files of 64-bit (two-word) entries. */
static inline void edma_or_array2(unsigned ctlr, int offset, int i, int j,
		unsigned or)
{
	edma_or(ctlr, offset + 4 * (i * 2 + j), or);
}

static inline void edma_write_array2(unsigned ctlr, int offset, int i, int j,
		unsigned val)
{
	edma_write(ctlr, offset + 4 * (i * 2 + j), val);
}
164static inline unsigned int edma_shadow0_read(unsigned ctlr, int offset)
165{
166 return edma_read(ctlr, EDMA_SHADOW0 + offset);
167}
168static inline unsigned int edma_shadow0_read_array(unsigned ctlr, int offset,
169 int i)
170{
171 return edma_read(ctlr, EDMA_SHADOW0 + offset + (i << 2));
172}
173static inline void edma_shadow0_write(unsigned ctlr, int offset, unsigned val)
174{
175 edma_write(ctlr, EDMA_SHADOW0 + offset, val);
176}
177static inline void edma_shadow0_write_array(unsigned ctlr, int offset, int i,
178 unsigned val)
179{
180 edma_write(ctlr, EDMA_SHADOW0 + offset + (i << 2), val);
181}
/* Accessors for a field at @offset inside PaRAM slot @param_no. */
static inline unsigned int edma_parm_read(unsigned ctlr, int offset,
		int param_no)
{
	return edma_read(ctlr, PARM_OFFSET(param_no) + offset);
}

static inline void edma_parm_write(unsigned ctlr, int offset, int param_no,
		unsigned val)
{
	edma_write(ctlr, PARM_OFFSET(param_no) + offset, val);
}

static inline void edma_parm_modify(unsigned ctlr, int offset, int param_no,
		unsigned and, unsigned or)
{
	edma_modify(ctlr, PARM_OFFSET(param_no) + offset, and, or);
}

static inline void edma_parm_and(unsigned ctlr, int offset, int param_no,
		unsigned and)
{
	edma_and(ctlr, PARM_OFFSET(param_no) + offset, and);
}

static inline void edma_parm_or(unsigned ctlr, int offset, int param_no,
		unsigned or)
{
	edma_or(ctlr, PARM_OFFSET(param_no) + offset, or);
}
207
/* Set @len consecutive bits of bitmap @p starting at bit @offset. */
static inline void set_bits(int offset, int len, unsigned long *p)
{
	int i;

	for (i = 0; i < len; i++)
		set_bit(offset + i, p);
}

/* Clear @len consecutive bits of bitmap @p starting at bit @offset. */
static inline void clear_bits(int offset, int len, unsigned long *p)
{
	int i;

	for (i = 0; i < len; i++)
		clear_bit(offset + i, p);
}
219
220
221
222
223struct edma {
224
225 unsigned num_channels;
226 unsigned num_region;
227 unsigned num_slots;
228 unsigned num_tc;
229 unsigned num_cc;
230 enum dma_event_q default_queue;
231
232
233 const s8 *noevent;
234
235
236
237
238 DECLARE_BITMAP(edma_inuse, EDMA_MAX_PARAMENTRY);
239
240
241
242
243
244 DECLARE_BITMAP(edma_unused, EDMA_MAX_DMACH);
245
246 unsigned irq_res_start;
247 unsigned irq_res_end;
248
249 struct dma_interrupt_data {
250 void (*callback)(unsigned channel, unsigned short ch_status,
251 void *data);
252 void *data;
253 } intr_data[EDMA_MAX_DMACH];
254};
255
256static struct edma *edma_cc[EDMA_MAX_CC];
257static int arch_num_cc;
258
259
260static const struct edmacc_param dummy_paramset = {
261 .link_bcntrld = 0xffff,
262 .ccnt = 1,
263};
264
265
266
267static void map_dmach_queue(unsigned ctlr, unsigned ch_no,
268 enum dma_event_q queue_no)
269{
270 int bit = (ch_no & 0x7) * 4;
271
272
273 if (queue_no == EVENTQ_DEFAULT)
274 queue_no = edma_cc[ctlr]->default_queue;
275
276 queue_no &= 7;
277 edma_modify_array(ctlr, EDMA_DMAQNUM, (ch_no >> 3),
278 ~(0x7 << bit), queue_no << bit);
279}
280
281static void __init map_queue_tc(unsigned ctlr, int queue_no, int tc_no)
282{
283 int bit = queue_no * 4;
284 edma_modify(ctlr, EDMA_QUETCMAP, ~(0x7 << bit), ((tc_no & 0x7) << bit));
285}
286
287static void __init assign_priority_to_queue(unsigned ctlr, int queue_no,
288 int priority)
289{
290 int bit = queue_no * 4;
291 edma_modify(ctlr, EDMA_QUEPRI, ~(0x7 << bit),
292 ((priority & 0x7) << bit));
293}
294
295
296
297
298
299
300
301
302
303
304
305
306static void __init map_dmach_param(unsigned ctlr)
307{
308 int i;
309 for (i = 0; i < EDMA_MAX_DMACH; i++)
310 edma_write_array(ctlr, EDMA_DCHMAP , i , (i << 5));
311}
312
313static inline void
314setup_dma_interrupt(unsigned lch,
315 void (*callback)(unsigned channel, u16 ch_status, void *data),
316 void *data)
317{
318 unsigned ctlr;
319
320 ctlr = EDMA_CTLR(lch);
321 lch = EDMA_CHAN_SLOT(lch);
322
323 if (!callback)
324 edma_shadow0_write_array(ctlr, SH_IECR, lch >> 5,
325 BIT(lch & 0x1f));
326
327 edma_cc[ctlr]->intr_data[lch].callback = callback;
328 edma_cc[ctlr]->intr_data[lch].data = data;
329
330 if (callback) {
331 edma_shadow0_write_array(ctlr, SH_ICR, lch >> 5,
332 BIT(lch & 0x1f));
333 edma_shadow0_write_array(ctlr, SH_IESR, lch >> 5,
334 BIT(lch & 0x1f));
335 }
336}
337
338static int irq2ctlr(int irq)
339{
340 if (irq >= edma_cc[0]->irq_res_start && irq <= edma_cc[0]->irq_res_end)
341 return 0;
342 else if (irq >= edma_cc[1]->irq_res_start &&
343 irq <= edma_cc[1]->irq_res_end)
344 return 1;
345
346 return -1;
347}
348
349
350
351
352
353
354static irqreturn_t dma_irq_handler(int irq, void *data)
355{
356 int ctlr;
357 u32 sh_ier;
358 u32 sh_ipr;
359 u32 bank;
360
361 ctlr = irq2ctlr(irq);
362 if (ctlr < 0)
363 return IRQ_NONE;
364
365 dev_dbg(data, "dma_irq_handler\n");
366
367 sh_ipr = edma_shadow0_read_array(ctlr, SH_IPR, 0);
368 if (!sh_ipr) {
369 sh_ipr = edma_shadow0_read_array(ctlr, SH_IPR, 1);
370 if (!sh_ipr)
371 return IRQ_NONE;
372 sh_ier = edma_shadow0_read_array(ctlr, SH_IER, 1);
373 bank = 1;
374 } else {
375 sh_ier = edma_shadow0_read_array(ctlr, SH_IER, 0);
376 bank = 0;
377 }
378
379 do {
380 u32 slot;
381 u32 channel;
382
383 dev_dbg(data, "IPR%d %08x\n", bank, sh_ipr);
384
385 slot = __ffs(sh_ipr);
386 sh_ipr &= ~(BIT(slot));
387
388 if (sh_ier & BIT(slot)) {
389 channel = (bank << 5) | slot;
390
391 edma_shadow0_write_array(ctlr, SH_ICR, bank,
392 BIT(slot));
393 if (edma_cc[ctlr]->intr_data[channel].callback)
394 edma_cc[ctlr]->intr_data[channel].callback(
395 channel, DMA_COMPLETE,
396 edma_cc[ctlr]->intr_data[channel].data);
397 }
398 } while (sh_ipr);
399
400 edma_shadow0_write(ctlr, SH_IEVAL, 1);
401 return IRQ_HANDLED;
402}
403
404
405
406
407
408
409static irqreturn_t dma_ccerr_handler(int irq, void *data)
410{
411 int i;
412 int ctlr;
413 unsigned int cnt = 0;
414
415 ctlr = irq2ctlr(irq);
416 if (ctlr < 0)
417 return IRQ_NONE;
418
419 dev_dbg(data, "dma_ccerr_handler\n");
420
421 if ((edma_read_array(ctlr, EDMA_EMR, 0) == 0) &&
422 (edma_read_array(ctlr, EDMA_EMR, 1) == 0) &&
423 (edma_read(ctlr, EDMA_QEMR) == 0) &&
424 (edma_read(ctlr, EDMA_CCERR) == 0))
425 return IRQ_NONE;
426
427 while (1) {
428 int j = -1;
429 if (edma_read_array(ctlr, EDMA_EMR, 0))
430 j = 0;
431 else if (edma_read_array(ctlr, EDMA_EMR, 1))
432 j = 1;
433 if (j >= 0) {
434 dev_dbg(data, "EMR%d %08x\n", j,
435 edma_read_array(ctlr, EDMA_EMR, j));
436 for (i = 0; i < 32; i++) {
437 int k = (j << 5) + i;
438 if (edma_read_array(ctlr, EDMA_EMR, j) &
439 BIT(i)) {
440
441 edma_write_array(ctlr, EDMA_EMCR, j,
442 BIT(i));
443
444 edma_shadow0_write_array(ctlr, SH_SECR,
445 j, BIT(i));
446 if (edma_cc[ctlr]->intr_data[k].
447 callback) {
448 edma_cc[ctlr]->intr_data[k].
449 callback(k,
450 DMA_CC_ERROR,
451 edma_cc[ctlr]->intr_data
452 [k].data);
453 }
454 }
455 }
456 } else if (edma_read(ctlr, EDMA_QEMR)) {
457 dev_dbg(data, "QEMR %02x\n",
458 edma_read(ctlr, EDMA_QEMR));
459 for (i = 0; i < 8; i++) {
460 if (edma_read(ctlr, EDMA_QEMR) & BIT(i)) {
461
462 edma_write(ctlr, EDMA_QEMCR, BIT(i));
463 edma_shadow0_write(ctlr, SH_QSECR,
464 BIT(i));
465
466
467 }
468 }
469 } else if (edma_read(ctlr, EDMA_CCERR)) {
470 dev_dbg(data, "CCERR %08x\n",
471 edma_read(ctlr, EDMA_CCERR));
472
473
474
475 for (i = 0; i < 8; i++) {
476 if (edma_read(ctlr, EDMA_CCERR) & BIT(i)) {
477
478 edma_write(ctlr, EDMA_CCERRCLR, BIT(i));
479
480
481 }
482 }
483 }
484 if ((edma_read_array(ctlr, EDMA_EMR, 0) == 0) &&
485 (edma_read_array(ctlr, EDMA_EMR, 1) == 0) &&
486 (edma_read(ctlr, EDMA_QEMR) == 0) &&
487 (edma_read(ctlr, EDMA_CCERR) == 0))
488 break;
489 cnt++;
490 if (cnt > 10)
491 break;
492 }
493 edma_write(ctlr, EDMA_EEVAL, 1);
494 return IRQ_HANDLED;
495}
496
497
498
499
500
501
502
503#define tc_errs_handled false
504
505static irqreturn_t dma_tc0err_handler(int irq, void *data)
506{
507 dev_dbg(data, "dma_tc0err_handler\n");
508 return IRQ_HANDLED;
509}
510
511static irqreturn_t dma_tc1err_handler(int irq, void *data)
512{
513 dev_dbg(data, "dma_tc1err_handler\n");
514 return IRQ_HANDLED;
515}
516
517static int reserve_contiguous_slots(int ctlr, unsigned int id,
518 unsigned int num_slots,
519 unsigned int start_slot)
520{
521 int i, j;
522 unsigned int count = num_slots;
523 int stop_slot = start_slot;
524 DECLARE_BITMAP(tmp_inuse, EDMA_MAX_PARAMENTRY);
525
526 for (i = start_slot; i < edma_cc[ctlr]->num_slots; ++i) {
527 j = EDMA_CHAN_SLOT(i);
528 if (!test_and_set_bit(j, edma_cc[ctlr]->edma_inuse)) {
529
530 if (count == num_slots)
531 stop_slot = i;
532
533 count--;
534 set_bit(j, tmp_inuse);
535
536 if (count == 0)
537 break;
538 } else {
539 clear_bit(j, tmp_inuse);
540
541 if (id == EDMA_CONT_PARAMS_FIXED_EXACT) {
542 stop_slot = i;
543 break;
544 } else {
545 count = num_slots;
546 }
547 }
548 }
549
550
551
552
553
554
555
556 if (i == edma_cc[ctlr]->num_slots)
557 stop_slot = i;
558
559 j = start_slot;
560 for_each_set_bit_from(j, tmp_inuse, stop_slot)
561 clear_bit(j, edma_cc[ctlr]->edma_inuse);
562
563 if (count)
564 return -EBUSY;
565
566 for (j = i - num_slots + 1; j <= i; ++j)
567 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(j),
568 &dummy_paramset, PARM_SIZE);
569
570 return EDMA_CTLR_CHAN(ctlr, i - num_slots + 1);
571}
572
573static int prepare_unused_channel_list(struct device *dev, void *data)
574{
575 struct platform_device *pdev = to_platform_device(dev);
576 int i, ctlr;
577
578 for (i = 0; i < pdev->num_resources; i++) {
579 if ((pdev->resource[i].flags & IORESOURCE_DMA) &&
580 (int)pdev->resource[i].start >= 0) {
581 ctlr = EDMA_CTLR(pdev->resource[i].start);
582 clear_bit(EDMA_CHAN_SLOT(pdev->resource[i].start),
583 edma_cc[ctlr]->edma_unused);
584 }
585 }
586
587 return 0;
588}
589
590
591
592static bool unused_chan_list_done;
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626int edma_alloc_channel(int channel,
627 void (*callback)(unsigned channel, u16 ch_status, void *data),
628 void *data,
629 enum dma_event_q eventq_no)
630{
631 unsigned i, done = 0, ctlr = 0;
632 int ret = 0;
633
634 if (!unused_chan_list_done) {
635
636
637
638
639
640 ret = bus_for_each_dev(&platform_bus_type, NULL, NULL,
641 prepare_unused_channel_list);
642 if (ret < 0)
643 return ret;
644
645 unused_chan_list_done = true;
646 }
647
648 if (channel >= 0) {
649 ctlr = EDMA_CTLR(channel);
650 channel = EDMA_CHAN_SLOT(channel);
651 }
652
653 if (channel < 0) {
654 for (i = 0; i < arch_num_cc; i++) {
655 channel = 0;
656 for (;;) {
657 channel = find_next_bit(edma_cc[i]->edma_unused,
658 edma_cc[i]->num_channels,
659 channel);
660 if (channel == edma_cc[i]->num_channels)
661 break;
662 if (!test_and_set_bit(channel,
663 edma_cc[i]->edma_inuse)) {
664 done = 1;
665 ctlr = i;
666 break;
667 }
668 channel++;
669 }
670 if (done)
671 break;
672 }
673 if (!done)
674 return -ENOMEM;
675 } else if (channel >= edma_cc[ctlr]->num_channels) {
676 return -EINVAL;
677 } else if (test_and_set_bit(channel, edma_cc[ctlr]->edma_inuse)) {
678 return -EBUSY;
679 }
680
681
682 edma_or_array2(ctlr, EDMA_DRAE, 0, channel >> 5, BIT(channel & 0x1f));
683
684
685 edma_stop(EDMA_CTLR_CHAN(ctlr, channel));
686 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(channel),
687 &dummy_paramset, PARM_SIZE);
688
689 if (callback)
690 setup_dma_interrupt(EDMA_CTLR_CHAN(ctlr, channel),
691 callback, data);
692
693 map_dmach_queue(ctlr, channel, eventq_no);
694
695 return EDMA_CTLR_CHAN(ctlr, channel);
696}
697EXPORT_SYMBOL(edma_alloc_channel);
698
699
700
701
702
703
704
705
706
707
708
709
710
711void edma_free_channel(unsigned channel)
712{
713 unsigned ctlr;
714
715 ctlr = EDMA_CTLR(channel);
716 channel = EDMA_CHAN_SLOT(channel);
717
718 if (channel >= edma_cc[ctlr]->num_channels)
719 return;
720
721 setup_dma_interrupt(channel, NULL, NULL);
722
723
724 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(channel),
725 &dummy_paramset, PARM_SIZE);
726 clear_bit(channel, edma_cc[ctlr]->edma_inuse);
727}
728EXPORT_SYMBOL(edma_free_channel);
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744int edma_alloc_slot(unsigned ctlr, int slot)
745{
746 if (!edma_cc[ctlr])
747 return -EINVAL;
748
749 if (slot >= 0)
750 slot = EDMA_CHAN_SLOT(slot);
751
752 if (slot < 0) {
753 slot = edma_cc[ctlr]->num_channels;
754 for (;;) {
755 slot = find_next_zero_bit(edma_cc[ctlr]->edma_inuse,
756 edma_cc[ctlr]->num_slots, slot);
757 if (slot == edma_cc[ctlr]->num_slots)
758 return -ENOMEM;
759 if (!test_and_set_bit(slot, edma_cc[ctlr]->edma_inuse))
760 break;
761 }
762 } else if (slot < edma_cc[ctlr]->num_channels ||
763 slot >= edma_cc[ctlr]->num_slots) {
764 return -EINVAL;
765 } else if (test_and_set_bit(slot, edma_cc[ctlr]->edma_inuse)) {
766 return -EBUSY;
767 }
768
769 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
770 &dummy_paramset, PARM_SIZE);
771
772 return EDMA_CTLR_CHAN(ctlr, slot);
773}
774EXPORT_SYMBOL(edma_alloc_slot);
775
776
777
778
779
780
781
782
783
784void edma_free_slot(unsigned slot)
785{
786 unsigned ctlr;
787
788 ctlr = EDMA_CTLR(slot);
789 slot = EDMA_CHAN_SLOT(slot);
790
791 if (slot < edma_cc[ctlr]->num_channels ||
792 slot >= edma_cc[ctlr]->num_slots)
793 return;
794
795 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
796 &dummy_paramset, PARM_SIZE);
797 clear_bit(slot, edma_cc[ctlr]->edma_inuse);
798}
799EXPORT_SYMBOL(edma_free_slot);
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827int edma_alloc_cont_slots(unsigned ctlr, unsigned int id, int slot, int count)
828{
829
830
831
832
833
834 if ((id != EDMA_CONT_PARAMS_ANY) &&
835 (slot < edma_cc[ctlr]->num_channels ||
836 slot >= edma_cc[ctlr]->num_slots))
837 return -EINVAL;
838
839
840
841
842
843
844 if (count < 1 || count >
845 (edma_cc[ctlr]->num_slots - edma_cc[ctlr]->num_channels))
846 return -EINVAL;
847
848 switch (id) {
849 case EDMA_CONT_PARAMS_ANY:
850 return reserve_contiguous_slots(ctlr, id, count,
851 edma_cc[ctlr]->num_channels);
852 case EDMA_CONT_PARAMS_FIXED_EXACT:
853 case EDMA_CONT_PARAMS_FIXED_NOT_EXACT:
854 return reserve_contiguous_slots(ctlr, id, count, slot);
855 default:
856 return -EINVAL;
857 }
858
859}
860EXPORT_SYMBOL(edma_alloc_cont_slots);
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875int edma_free_cont_slots(unsigned slot, int count)
876{
877 unsigned ctlr, slot_to_free;
878 int i;
879
880 ctlr = EDMA_CTLR(slot);
881 slot = EDMA_CHAN_SLOT(slot);
882
883 if (slot < edma_cc[ctlr]->num_channels ||
884 slot >= edma_cc[ctlr]->num_slots ||
885 count < 1)
886 return -EINVAL;
887
888 for (i = slot; i < slot + count; ++i) {
889 ctlr = EDMA_CTLR(i);
890 slot_to_free = EDMA_CHAN_SLOT(i);
891
892 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot_to_free),
893 &dummy_paramset, PARM_SIZE);
894 clear_bit(slot_to_free, edma_cc[ctlr]->edma_inuse);
895 }
896
897 return 0;
898}
899EXPORT_SYMBOL(edma_free_cont_slots);
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916void edma_set_src(unsigned slot, dma_addr_t src_port,
917 enum address_mode mode, enum fifo_width width)
918{
919 unsigned ctlr;
920
921 ctlr = EDMA_CTLR(slot);
922 slot = EDMA_CHAN_SLOT(slot);
923
924 if (slot < edma_cc[ctlr]->num_slots) {
925 unsigned int i = edma_parm_read(ctlr, PARM_OPT, slot);
926
927 if (mode) {
928
929 i = (i & ~(EDMA_FWID)) | (SAM | ((width & 0x7) << 8));
930 } else {
931
932 i &= ~SAM;
933 }
934 edma_parm_write(ctlr, PARM_OPT, slot, i);
935
936
937
938 edma_parm_write(ctlr, PARM_SRC, slot, src_port);
939 }
940}
941EXPORT_SYMBOL(edma_set_src);
942
943
944
945
946
947
948
949
950
951
952
953
954void edma_set_dest(unsigned slot, dma_addr_t dest_port,
955 enum address_mode mode, enum fifo_width width)
956{
957 unsigned ctlr;
958
959 ctlr = EDMA_CTLR(slot);
960 slot = EDMA_CHAN_SLOT(slot);
961
962 if (slot < edma_cc[ctlr]->num_slots) {
963 unsigned int i = edma_parm_read(ctlr, PARM_OPT, slot);
964
965 if (mode) {
966
967 i = (i & ~(EDMA_FWID)) | (DAM | ((width & 0x7) << 8));
968 } else {
969
970 i &= ~DAM;
971 }
972 edma_parm_write(ctlr, PARM_OPT, slot, i);
973
974
975 edma_parm_write(ctlr, PARM_DST, slot, dest_port);
976 }
977}
978EXPORT_SYMBOL(edma_set_dest);
979
980
981
982
983
984
985
986
987
988
989void edma_get_position(unsigned slot, dma_addr_t *src, dma_addr_t *dst)
990{
991 struct edmacc_param temp;
992 unsigned ctlr;
993
994 ctlr = EDMA_CTLR(slot);
995 slot = EDMA_CHAN_SLOT(slot);
996
997 edma_read_slot(EDMA_CTLR_CHAN(ctlr, slot), &temp);
998 if (src != NULL)
999 *src = temp.src;
1000 if (dst != NULL)
1001 *dst = temp.dst;
1002}
1003EXPORT_SYMBOL(edma_get_position);
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015void edma_set_src_index(unsigned slot, s16 src_bidx, s16 src_cidx)
1016{
1017 unsigned ctlr;
1018
1019 ctlr = EDMA_CTLR(slot);
1020 slot = EDMA_CHAN_SLOT(slot);
1021
1022 if (slot < edma_cc[ctlr]->num_slots) {
1023 edma_parm_modify(ctlr, PARM_SRC_DST_BIDX, slot,
1024 0xffff0000, src_bidx);
1025 edma_parm_modify(ctlr, PARM_SRC_DST_CIDX, slot,
1026 0xffff0000, src_cidx);
1027 }
1028}
1029EXPORT_SYMBOL(edma_set_src_index);
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041void edma_set_dest_index(unsigned slot, s16 dest_bidx, s16 dest_cidx)
1042{
1043 unsigned ctlr;
1044
1045 ctlr = EDMA_CTLR(slot);
1046 slot = EDMA_CHAN_SLOT(slot);
1047
1048 if (slot < edma_cc[ctlr]->num_slots) {
1049 edma_parm_modify(ctlr, PARM_SRC_DST_BIDX, slot,
1050 0x0000ffff, dest_bidx << 16);
1051 edma_parm_modify(ctlr, PARM_SRC_DST_CIDX, slot,
1052 0x0000ffff, dest_cidx << 16);
1053 }
1054}
1055EXPORT_SYMBOL(edma_set_dest_index);
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086void edma_set_transfer_params(unsigned slot,
1087 u16 acnt, u16 bcnt, u16 ccnt,
1088 u16 bcnt_rld, enum sync_dimension sync_mode)
1089{
1090 unsigned ctlr;
1091
1092 ctlr = EDMA_CTLR(slot);
1093 slot = EDMA_CHAN_SLOT(slot);
1094
1095 if (slot < edma_cc[ctlr]->num_slots) {
1096 edma_parm_modify(ctlr, PARM_LINK_BCNTRLD, slot,
1097 0x0000ffff, bcnt_rld << 16);
1098 if (sync_mode == ASYNC)
1099 edma_parm_and(ctlr, PARM_OPT, slot, ~SYNCDIM);
1100 else
1101 edma_parm_or(ctlr, PARM_OPT, slot, SYNCDIM);
1102
1103 edma_parm_write(ctlr, PARM_A_B_CNT, slot, (bcnt << 16) | acnt);
1104 edma_parm_write(ctlr, PARM_CCNT, slot, ccnt);
1105 }
1106}
1107EXPORT_SYMBOL(edma_set_transfer_params);
1108
1109
1110
1111
1112
1113
1114
1115
1116void edma_link(unsigned from, unsigned to)
1117{
1118 unsigned ctlr_from, ctlr_to;
1119
1120 ctlr_from = EDMA_CTLR(from);
1121 from = EDMA_CHAN_SLOT(from);
1122 ctlr_to = EDMA_CTLR(to);
1123 to = EDMA_CHAN_SLOT(to);
1124
1125 if (from >= edma_cc[ctlr_from]->num_slots)
1126 return;
1127 if (to >= edma_cc[ctlr_to]->num_slots)
1128 return;
1129 edma_parm_modify(ctlr_from, PARM_LINK_BCNTRLD, from, 0xffff0000,
1130 PARM_OFFSET(to));
1131}
1132EXPORT_SYMBOL(edma_link);
1133
1134
1135
1136
1137
1138
1139
1140
1141void edma_unlink(unsigned from)
1142{
1143 unsigned ctlr;
1144
1145 ctlr = EDMA_CTLR(from);
1146 from = EDMA_CHAN_SLOT(from);
1147
1148 if (from >= edma_cc[ctlr]->num_slots)
1149 return;
1150 edma_parm_or(ctlr, PARM_LINK_BCNTRLD, from, 0xffff);
1151}
1152EXPORT_SYMBOL(edma_unlink);
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168void edma_write_slot(unsigned slot, const struct edmacc_param *param)
1169{
1170 unsigned ctlr;
1171
1172 ctlr = EDMA_CTLR(slot);
1173 slot = EDMA_CHAN_SLOT(slot);
1174
1175 if (slot >= edma_cc[ctlr]->num_slots)
1176 return;
1177 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot), param,
1178 PARM_SIZE);
1179}
1180EXPORT_SYMBOL(edma_write_slot);
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190void edma_read_slot(unsigned slot, struct edmacc_param *param)
1191{
1192 unsigned ctlr;
1193
1194 ctlr = EDMA_CTLR(slot);
1195 slot = EDMA_CHAN_SLOT(slot);
1196
1197 if (slot >= edma_cc[ctlr]->num_slots)
1198 return;
1199 memcpy_fromio(param, edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
1200 PARM_SIZE);
1201}
1202EXPORT_SYMBOL(edma_read_slot);
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215void edma_pause(unsigned channel)
1216{
1217 unsigned ctlr;
1218
1219 ctlr = EDMA_CTLR(channel);
1220 channel = EDMA_CHAN_SLOT(channel);
1221
1222 if (channel < edma_cc[ctlr]->num_channels) {
1223 unsigned int mask = BIT(channel & 0x1f);
1224
1225 edma_shadow0_write_array(ctlr, SH_EECR, channel >> 5, mask);
1226 }
1227}
1228EXPORT_SYMBOL(edma_pause);
1229
1230
1231
1232
1233
1234
1235
1236void edma_resume(unsigned channel)
1237{
1238 unsigned ctlr;
1239
1240 ctlr = EDMA_CTLR(channel);
1241 channel = EDMA_CHAN_SLOT(channel);
1242
1243 if (channel < edma_cc[ctlr]->num_channels) {
1244 unsigned int mask = BIT(channel & 0x1f);
1245
1246 edma_shadow0_write_array(ctlr, SH_EESR, channel >> 5, mask);
1247 }
1248}
1249EXPORT_SYMBOL(edma_resume);
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262int edma_start(unsigned channel)
1263{
1264 unsigned ctlr;
1265
1266 ctlr = EDMA_CTLR(channel);
1267 channel = EDMA_CHAN_SLOT(channel);
1268
1269 if (channel < edma_cc[ctlr]->num_channels) {
1270 int j = channel >> 5;
1271 unsigned int mask = BIT(channel & 0x1f);
1272
1273
1274 if (test_bit(channel, edma_cc[ctlr]->edma_unused)) {
1275 pr_debug("EDMA: ESR%d %08x\n", j,
1276 edma_shadow0_read_array(ctlr, SH_ESR, j));
1277 edma_shadow0_write_array(ctlr, SH_ESR, j, mask);
1278 return 0;
1279 }
1280
1281
1282 pr_debug("EDMA: ER%d %08x\n", j,
1283 edma_shadow0_read_array(ctlr, SH_ER, j));
1284
1285 edma_write_array(ctlr, EDMA_ECR, j, mask);
1286 edma_write_array(ctlr, EDMA_EMCR, j, mask);
1287
1288 edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
1289 edma_shadow0_write_array(ctlr, SH_EESR, j, mask);
1290 pr_debug("EDMA: EER%d %08x\n", j,
1291 edma_shadow0_read_array(ctlr, SH_EER, j));
1292 return 0;
1293 }
1294
1295 return -EINVAL;
1296}
1297EXPORT_SYMBOL(edma_start);
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308void edma_stop(unsigned channel)
1309{
1310 unsigned ctlr;
1311
1312 ctlr = EDMA_CTLR(channel);
1313 channel = EDMA_CHAN_SLOT(channel);
1314
1315 if (channel < edma_cc[ctlr]->num_channels) {
1316 int j = channel >> 5;
1317 unsigned int mask = BIT(channel & 0x1f);
1318
1319 edma_shadow0_write_array(ctlr, SH_EECR, j, mask);
1320 edma_shadow0_write_array(ctlr, SH_ECR, j, mask);
1321 edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
1322 edma_write_array(ctlr, EDMA_EMCR, j, mask);
1323
1324 pr_debug("EDMA: EER%d %08x\n", j,
1325 edma_shadow0_read_array(ctlr, SH_EER, j));
1326
1327
1328
1329
1330 }
1331}
1332EXPORT_SYMBOL(edma_stop);
1333
1334
1335
1336
1337
1338
1339
1340
1341
1342
1343
1344
1345
1346
1347void edma_clean_channel(unsigned channel)
1348{
1349 unsigned ctlr;
1350
1351 ctlr = EDMA_CTLR(channel);
1352 channel = EDMA_CHAN_SLOT(channel);
1353
1354 if (channel < edma_cc[ctlr]->num_channels) {
1355 int j = (channel >> 5);
1356 unsigned int mask = BIT(channel & 0x1f);
1357
1358 pr_debug("EDMA: EMR%d %08x\n", j,
1359 edma_read_array(ctlr, EDMA_EMR, j));
1360 edma_shadow0_write_array(ctlr, SH_ECR, j, mask);
1361
1362 edma_write_array(ctlr, EDMA_EMCR, j, mask);
1363
1364 edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
1365 edma_write(ctlr, EDMA_CCERRCLR, BIT(16) | BIT(1) | BIT(0));
1366 }
1367}
1368EXPORT_SYMBOL(edma_clean_channel);
1369
1370
1371
1372
1373
1374
1375void edma_clear_event(unsigned channel)
1376{
1377 unsigned ctlr;
1378
1379 ctlr = EDMA_CTLR(channel);
1380 channel = EDMA_CHAN_SLOT(channel);
1381
1382 if (channel >= edma_cc[ctlr]->num_channels)
1383 return;
1384 if (channel < 32)
1385 edma_write(ctlr, EDMA_ECR, BIT(channel));
1386 else
1387 edma_write(ctlr, EDMA_ECRH, BIT(channel - 32));
1388}
1389EXPORT_SYMBOL(edma_clear_event);
1390
1391
1392
1393static int __init edma_probe(struct platform_device *pdev)
1394{
1395 struct edma_soc_info **info = pdev->dev.platform_data;
1396 const s8 (*queue_priority_mapping)[2];
1397 const s8 (*queue_tc_mapping)[2];
1398 int i, j, off, ln, found = 0;
1399 int status = -1;
1400 const s16 (*rsv_chans)[2];
1401 const s16 (*rsv_slots)[2];
1402 int irq[EDMA_MAX_CC] = {0, 0};
1403 int err_irq[EDMA_MAX_CC] = {0, 0};
1404 struct resource *r[EDMA_MAX_CC] = {NULL};
1405 resource_size_t len[EDMA_MAX_CC];
1406 char res_name[10];
1407 char irq_name[10];
1408
1409 if (!info)
1410 return -ENODEV;
1411
1412 for (j = 0; j < EDMA_MAX_CC; j++) {
1413 sprintf(res_name, "edma_cc%d", j);
1414 r[j] = platform_get_resource_byname(pdev, IORESOURCE_MEM,
1415 res_name);
1416 if (!r[j] || !info[j]) {
1417 if (found)
1418 break;
1419 else
1420 return -ENODEV;
1421 } else {
1422 found = 1;
1423 }
1424
1425 len[j] = resource_size(r[j]);
1426
1427 r[j] = request_mem_region(r[j]->start, len[j],
1428 dev_name(&pdev->dev));
1429 if (!r[j]) {
1430 status = -EBUSY;
1431 goto fail1;
1432 }
1433
1434 edmacc_regs_base[j] = ioremap(r[j]->start, len[j]);
1435 if (!edmacc_regs_base[j]) {
1436 status = -EBUSY;
1437 goto fail1;
1438 }
1439
1440 edma_cc[j] = kzalloc(sizeof(struct edma), GFP_KERNEL);
1441 if (!edma_cc[j]) {
1442 status = -ENOMEM;
1443 goto fail1;
1444 }
1445
1446 edma_cc[j]->num_channels = min_t(unsigned, info[j]->n_channel,
1447 EDMA_MAX_DMACH);
1448 edma_cc[j]->num_slots = min_t(unsigned, info[j]->n_slot,
1449 EDMA_MAX_PARAMENTRY);
1450 edma_cc[j]->num_cc = min_t(unsigned, info[j]->n_cc,
1451 EDMA_MAX_CC);
1452
1453 edma_cc[j]->default_queue = info[j]->default_queue;
1454
1455 dev_dbg(&pdev->dev, "DMA REG BASE ADDR=%p\n",
1456 edmacc_regs_base[j]);
1457
1458 for (i = 0; i < edma_cc[j]->num_slots; i++)
1459 memcpy_toio(edmacc_regs_base[j] + PARM_OFFSET(i),
1460 &dummy_paramset, PARM_SIZE);
1461
1462
1463 memset(edma_cc[j]->edma_unused, 0xff,
1464 sizeof(edma_cc[j]->edma_unused));
1465
1466 if (info[j]->rsv) {
1467
1468
1469 rsv_chans = info[j]->rsv->rsv_chans;
1470 if (rsv_chans) {
1471 for (i = 0; rsv_chans[i][0] != -1; i++) {
1472 off = rsv_chans[i][0];
1473 ln = rsv_chans[i][1];
1474 clear_bits(off, ln,
1475 edma_cc[j]->edma_unused);
1476 }
1477 }
1478
1479
1480 rsv_slots = info[j]->rsv->rsv_slots;
1481 if (rsv_slots) {
1482 for (i = 0; rsv_slots[i][0] != -1; i++) {
1483 off = rsv_slots[i][0];
1484 ln = rsv_slots[i][1];
1485 set_bits(off, ln,
1486 edma_cc[j]->edma_inuse);
1487 }
1488 }
1489 }
1490
1491 sprintf(irq_name, "edma%d", j);
1492 irq[j] = platform_get_irq_byname(pdev, irq_name);
1493 edma_cc[j]->irq_res_start = irq[j];
1494 status = request_irq(irq[j], dma_irq_handler, 0, "edma",
1495 &pdev->dev);
1496 if (status < 0) {
1497 dev_dbg(&pdev->dev, "request_irq %d failed --> %d\n",
1498 irq[j], status);
1499 goto fail;
1500 }
1501
1502 sprintf(irq_name, "edma%d_err", j);
1503 err_irq[j] = platform_get_irq_byname(pdev, irq_name);
1504 edma_cc[j]->irq_res_end = err_irq[j];
1505 status = request_irq(err_irq[j], dma_ccerr_handler, 0,
1506 "edma_error", &pdev->dev);
1507 if (status < 0) {
1508 dev_dbg(&pdev->dev, "request_irq %d failed --> %d\n",
1509 err_irq[j], status);
1510 goto fail;
1511 }
1512
1513 for (i = 0; i < edma_cc[j]->num_channels; i++)
1514 map_dmach_queue(j, i, info[j]->default_queue);
1515
1516 queue_tc_mapping = info[j]->queue_tc_mapping;
1517 queue_priority_mapping = info[j]->queue_priority_mapping;
1518
1519
1520 for (i = 0; queue_tc_mapping[i][0] != -1; i++)
1521 map_queue_tc(j, queue_tc_mapping[i][0],
1522 queue_tc_mapping[i][1]);
1523
1524
1525 for (i = 0; queue_priority_mapping[i][0] != -1; i++)
1526 assign_priority_to_queue(j,
1527 queue_priority_mapping[i][0],
1528 queue_priority_mapping[i][1]);
1529
1530
1531
1532
1533 if (edma_read(j, EDMA_CCCFG) & CHMAP_EXIST)
1534 map_dmach_param(j);
1535
1536 for (i = 0; i < info[j]->n_region; i++) {
1537 edma_write_array2(j, EDMA_DRAE, i, 0, 0x0);
1538 edma_write_array2(j, EDMA_DRAE, i, 1, 0x0);
1539 edma_write_array(j, EDMA_QRAE, i, 0x0);
1540 }
1541 arch_num_cc++;
1542 }
1543
1544 if (tc_errs_handled) {
1545 status = request_irq(IRQ_TCERRINT0, dma_tc0err_handler, 0,
1546 "edma_tc0", &pdev->dev);
1547 if (status < 0) {
1548 dev_dbg(&pdev->dev, "request_irq %d failed --> %d\n",
1549 IRQ_TCERRINT0, status);
1550 return status;
1551 }
1552 status = request_irq(IRQ_TCERRINT, dma_tc1err_handler, 0,
1553 "edma_tc1", &pdev->dev);
1554 if (status < 0) {
1555 dev_dbg(&pdev->dev, "request_irq %d --> %d\n",
1556 IRQ_TCERRINT, status);
1557 return status;
1558 }
1559 }
1560
1561 return 0;
1562
1563fail:
1564 for (i = 0; i < EDMA_MAX_CC; i++) {
1565 if (err_irq[i])
1566 free_irq(err_irq[i], &pdev->dev);
1567 if (irq[i])
1568 free_irq(irq[i], &pdev->dev);
1569 }
1570fail1:
1571 for (i = 0; i < EDMA_MAX_CC; i++) {
1572 if (r[i])
1573 release_mem_region(r[i]->start, len[i]);
1574 if (edmacc_regs_base[i])
1575 iounmap(edmacc_regs_base[i]);
1576 kfree(edma_cc[i]);
1577 }
1578 return status;
1579}
1580
1581
1582static struct platform_driver edma_driver = {
1583 .driver.name = "edma",
1584};
1585
1586static int __init edma_init(void)
1587{
1588 return platform_driver_probe(&edma_driver, edma_probe);
1589}
1590arch_initcall(edma_init);
1591
1592