1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include <linux/kernel.h>
21#include <linux/sched.h>
22#include <linux/init.h>
23#include <linux/module.h>
24#include <linux/interrupt.h>
25#include <linux/platform_device.h>
26#include <linux/spinlock.h>
27#include <linux/compiler.h>
28#include <linux/io.h>
29
30#include <mach/cputype.h>
31#include <mach/memory.h>
32#include <mach/hardware.h>
33#include <mach/irqs.h>
34#include <mach/edma.h>
35#include <mach/mux.h>
36
37
38
39#define PARM_OPT 0x00
40#define PARM_SRC 0x04
41#define PARM_A_B_CNT 0x08
42#define PARM_DST 0x0c
43#define PARM_SRC_DST_BIDX 0x10
44#define PARM_LINK_BCNTRLD 0x14
45#define PARM_SRC_DST_CIDX 0x18
46#define PARM_CCNT 0x1c
47
48#define PARM_SIZE 0x20
49
50
51#define SH_ER 0x00
52#define SH_ECR 0x08
53#define SH_ESR 0x10
54#define SH_CER 0x18
55#define SH_EER 0x20
56#define SH_EECR 0x28
57#define SH_EESR 0x30
58#define SH_SER 0x38
59#define SH_SECR 0x40
60#define SH_IER 0x50
61#define SH_IECR 0x58
62#define SH_IESR 0x60
63#define SH_IPR 0x68
64#define SH_ICR 0x70
65#define SH_IEVAL 0x78
66#define SH_QER 0x80
67#define SH_QEER 0x84
68#define SH_QEECR 0x88
69#define SH_QEESR 0x8c
70#define SH_QSER 0x90
71#define SH_QSECR 0x94
72#define SH_SIZE 0x200
73
74
75#define EDMA_REV 0x0000
76#define EDMA_CCCFG 0x0004
77#define EDMA_QCHMAP 0x0200
78#define EDMA_DMAQNUM 0x0240
79#define EDMA_QDMAQNUM 0x0260
80#define EDMA_QUETCMAP 0x0280
81#define EDMA_QUEPRI 0x0284
82#define EDMA_EMR 0x0300
83#define EDMA_EMCR 0x0308
84#define EDMA_QEMR 0x0310
85#define EDMA_QEMCR 0x0314
86#define EDMA_CCERR 0x0318
87#define EDMA_CCERRCLR 0x031c
88#define EDMA_EEVAL 0x0320
89#define EDMA_DRAE 0x0340
90#define EDMA_QRAE 0x0380
91#define EDMA_QUEEVTENTRY 0x0400
92#define EDMA_QSTAT 0x0600
93#define EDMA_QWMTHRA 0x0620
94#define EDMA_QWMTHRB 0x0624
95#define EDMA_CCSTAT 0x0640
96
97#define EDMA_M 0x1000
98#define EDMA_ECR 0x1008
99#define EDMA_ECRH 0x100C
100#define EDMA_SHADOW0 0x2000
101#define EDMA_PARM 0x4000
102
103#define PARM_OFFSET(param_no) (EDMA_PARM + ((param_no) << 5))
104
105#define EDMA_DCHMAP 0x0100
106#define CHMAP_EXIST BIT(24)
107
108#define EDMA_MAX_DMACH 64
109#define EDMA_MAX_PARAMENTRY 512
110#define EDMA_MAX_CC 2
111
112
113
114
115static void __iomem *edmacc_regs_base[EDMA_MAX_CC];
116
117static inline unsigned int edma_read(unsigned ctlr, int offset)
118{
119 return (unsigned int)__raw_readl(edmacc_regs_base[ctlr] + offset);
120}
121
122static inline void edma_write(unsigned ctlr, int offset, int val)
123{
124 __raw_writel(val, edmacc_regs_base[ctlr] + offset);
125}
/* Read-modify-write: keep only bits in @and, then set bits in @or. */
static inline void edma_modify(unsigned ctlr, int offset, unsigned and,
		unsigned or)
{
	unsigned cur = edma_read(ctlr, offset);

	edma_write(ctlr, offset, (cur & and) | or);
}
/* Clear the register bits not set in @and (read-modify-write). */
static inline void edma_and(unsigned ctlr, int offset, unsigned and)
{
	edma_write(ctlr, offset, edma_read(ctlr, offset) & and);
}
/* Set the register bits in @or (read-modify-write). */
static inline void edma_or(unsigned ctlr, int offset, unsigned or)
{
	edma_write(ctlr, offset, edma_read(ctlr, offset) | or);
}
/* Read element @i of a register array (4-byte stride). */
static inline unsigned int edma_read_array(unsigned ctlr, int offset, int i)
{
	int reg = offset + (i << 2);

	return edma_read(ctlr, reg);
}
/* Write element @i of a register array (4-byte stride). */
static inline void edma_write_array(unsigned ctlr, int offset, int i,
		unsigned val)
{
	int reg = offset + (i << 2);

	edma_write(ctlr, reg, val);
}
/* Read-modify-write element @i of a register array (4-byte stride). */
static inline void edma_modify_array(unsigned ctlr, int offset, int i,
		unsigned and, unsigned or)
{
	int reg = offset + (i << 2);

	edma_modify(ctlr, reg, and, or);
}
/* OR bits into element @i of a register array (4-byte stride). */
static inline void edma_or_array(unsigned ctlr, int offset, int i, unsigned or)
{
	int reg = offset + (i << 2);

	edma_or(ctlr, reg, or);
}
/* OR bits into element [i][j] of a 2-wide register array (4-byte stride). */
static inline void edma_or_array2(unsigned ctlr, int offset, int i, int j,
		unsigned or)
{
	int reg = offset + ((i * 2 + j) << 2);

	edma_or(ctlr, reg, or);
}
/* Write element [i][j] of a 2-wide register array (4-byte stride). */
static inline void edma_write_array2(unsigned ctlr, int offset, int i, int j,
		unsigned val)
{
	int reg = offset + ((i * 2 + j) << 2);

	edma_write(ctlr, reg, val);
}
174static inline unsigned int edma_shadow0_read(unsigned ctlr, int offset)
175{
176 return edma_read(ctlr, EDMA_SHADOW0 + offset);
177}
178static inline unsigned int edma_shadow0_read_array(unsigned ctlr, int offset,
179 int i)
180{
181 return edma_read(ctlr, EDMA_SHADOW0 + offset + (i << 2));
182}
183static inline void edma_shadow0_write(unsigned ctlr, int offset, unsigned val)
184{
185 edma_write(ctlr, EDMA_SHADOW0 + offset, val);
186}
187static inline void edma_shadow0_write_array(unsigned ctlr, int offset, int i,
188 unsigned val)
189{
190 edma_write(ctlr, EDMA_SHADOW0 + offset + (i << 2), val);
191}
192static inline unsigned int edma_parm_read(unsigned ctlr, int offset,
193 int param_no)
194{
195 return edma_read(ctlr, EDMA_PARM + offset + (param_no << 5));
196}
197static inline void edma_parm_write(unsigned ctlr, int offset, int param_no,
198 unsigned val)
199{
200 edma_write(ctlr, EDMA_PARM + offset + (param_no << 5), val);
201}
202static inline void edma_parm_modify(unsigned ctlr, int offset, int param_no,
203 unsigned and, unsigned or)
204{
205 edma_modify(ctlr, EDMA_PARM + offset + (param_no << 5), and, or);
206}
207static inline void edma_parm_and(unsigned ctlr, int offset, int param_no,
208 unsigned and)
209{
210 edma_and(ctlr, EDMA_PARM + offset + (param_no << 5), and);
211}
212static inline void edma_parm_or(unsigned ctlr, int offset, int param_no,
213 unsigned or)
214{
215 edma_or(ctlr, EDMA_PARM + offset + (param_no << 5), or);
216}
217
218
219
220
/* Per-channel-controller (CC) runtime state; one instance per CC. */
struct edma {
	/* resource counts, clamped against the driver limits in probe */
	unsigned num_channels;
	unsigned num_region;
	unsigned num_slots;
	unsigned num_tc;
	unsigned num_cc;
	enum dma_event_q default_queue;	/* queue used for EVENTQ_DEFAULT */

	/* platform list of channels with no event trigger; -1 terminated */
	const s8 *noevent;

	/*
	 * One bit per PaRAM slot; set while the slot (or the DMA channel
	 * mapped onto it) is allocated.
	 */
	DECLARE_BITMAP(edma_inuse, EDMA_MAX_PARAMENTRY);

	/*
	 * One bit per DMA channel; set for channels that have no hardware
	 * event on this platform (filled from 'noevent' at probe time).
	 */
	DECLARE_BITMAP(edma_noevent, EDMA_MAX_DMACH);

	/* IRQ number range owned by this CC; used by irq2ctlr() */
	unsigned irq_res_start;
	unsigned irq_res_end;

	/* per-channel completion/error callback registered by clients */
	struct dma_interrupt_data {
		void (*callback)(unsigned channel, unsigned short ch_status,
				void *data);
		void *data;
	} intr_data[EDMA_MAX_DMACH];
};
253
/* Per-CC state; entries stay NULL for controllers absent from the platform. */
static struct edma *edma_info[EDMA_MAX_CC];

/* Dummy PaRAM set used to (re)initialize slots to a harmless no-op. */
static const struct edmacc_param dummy_paramset = {
	.link_bcntrld = 0xffff,	/* link target 0xffff == "no link" */
	.ccnt = 1,
};
261
262
263
264static void map_dmach_queue(unsigned ctlr, unsigned ch_no,
265 enum dma_event_q queue_no)
266{
267 int bit = (ch_no & 0x7) * 4;
268
269
270 if (queue_no == EVENTQ_DEFAULT)
271 queue_no = edma_info[ctlr]->default_queue;
272
273 queue_no &= 7;
274 edma_modify_array(ctlr, EDMA_DMAQNUM, (ch_no >> 3),
275 ~(0x7 << bit), queue_no << bit);
276}
277
278static void __init map_queue_tc(unsigned ctlr, int queue_no, int tc_no)
279{
280 int bit = queue_no * 4;
281 edma_modify(ctlr, EDMA_QUETCMAP, ~(0x7 << bit), ((tc_no & 0x7) << bit));
282}
283
284static void __init assign_priority_to_queue(unsigned ctlr, int queue_no,
285 int priority)
286{
287 int bit = queue_no * 4;
288 edma_modify(ctlr, EDMA_QUEPRI, ~(0x7 << bit),
289 ((priority & 0x7) << bit));
290}
291
292
293
294
295
296
297
298
299
300
301
302
303static void __init map_dmach_param(unsigned ctlr)
304{
305 int i;
306 for (i = 0; i < EDMA_MAX_DMACH; i++)
307 edma_write_array(ctlr, EDMA_DCHMAP , i , (i << 5));
308}
309
/*
 * Install or remove the completion-interrupt callback for channel @lch
 * (controller+channel handle).  Removing (@callback == NULL) disables the
 * interrupt before the callback pointer is cleared; installing clears any
 * stale pending status before enabling, so the new callback cannot fire
 * for an old event.
 */
static inline void
setup_dma_interrupt(unsigned lch,
	void (*callback)(unsigned channel, u16 ch_status, void *data),
	void *data)
{
	unsigned ctlr;

	ctlr = EDMA_CTLR(lch);
	lch = EDMA_CHAN_SLOT(lch);

	if (!callback) {
		/* disable the completion interrupt for this channel */
		edma_shadow0_write_array(ctlr, SH_IECR, lch >> 5,
				(1 << (lch & 0x1f)));
	}

	edma_info[ctlr]->intr_data[lch].callback = callback;
	edma_info[ctlr]->intr_data[lch].data = data;

	if (callback) {
		/* clear pending status, then enable the interrupt */
		edma_shadow0_write_array(ctlr, SH_ICR, lch >> 5,
				(1 << (lch & 0x1f)));
		edma_shadow0_write_array(ctlr, SH_IESR, lch >> 5,
				(1 << (lch & 0x1f)));
	}
}
335
336static int irq2ctlr(int irq)
337{
338 if (irq >= edma_info[0]->irq_res_start &&
339 irq <= edma_info[0]->irq_res_end)
340 return 0;
341 else if (irq >= edma_info[1]->irq_res_start &&
342 irq <= edma_info[1]->irq_res_end)
343 return 1;
344
345 return -1;
346}
347
348
349
350
351
352
353static irqreturn_t dma_irq_handler(int irq, void *data)
354{
355 int i;
356 unsigned ctlr;
357 unsigned int cnt = 0;
358
359 ctlr = irq2ctlr(irq);
360
361 dev_dbg(data, "dma_irq_handler\n");
362
363 if ((edma_shadow0_read_array(ctlr, SH_IPR, 0) == 0)
364 && (edma_shadow0_read_array(ctlr, SH_IPR, 1) == 0))
365 return IRQ_NONE;
366
367 while (1) {
368 int j;
369 if (edma_shadow0_read_array(ctlr, SH_IPR, 0))
370 j = 0;
371 else if (edma_shadow0_read_array(ctlr, SH_IPR, 1))
372 j = 1;
373 else
374 break;
375 dev_dbg(data, "IPR%d %08x\n", j,
376 edma_shadow0_read_array(ctlr, SH_IPR, j));
377 for (i = 0; i < 32; i++) {
378 int k = (j << 5) + i;
379 if (edma_shadow0_read_array(ctlr, SH_IPR, j) &
380 (1 << i)) {
381
382 edma_shadow0_write_array(ctlr, SH_ICR, j,
383 (1 << i));
384 if (edma_info[ctlr]->intr_data[k].callback) {
385 edma_info[ctlr]->intr_data[k].callback(
386 k, DMA_COMPLETE,
387 edma_info[ctlr]->intr_data[k].
388 data);
389 }
390 }
391 }
392 cnt++;
393 if (cnt > 10)
394 break;
395 }
396 edma_shadow0_write(ctlr, SH_IEVAL, 1);
397 return IRQ_HANDLED;
398}
399
400
401
402
403
404
405static irqreturn_t dma_ccerr_handler(int irq, void *data)
406{
407 int i;
408 unsigned ctlr;
409 unsigned int cnt = 0;
410
411 ctlr = irq2ctlr(irq);
412
413 dev_dbg(data, "dma_ccerr_handler\n");
414
415 if ((edma_read_array(ctlr, EDMA_EMR, 0) == 0) &&
416 (edma_read_array(ctlr, EDMA_EMR, 1) == 0) &&
417 (edma_read(ctlr, EDMA_QEMR) == 0) &&
418 (edma_read(ctlr, EDMA_CCERR) == 0))
419 return IRQ_NONE;
420
421 while (1) {
422 int j = -1;
423 if (edma_read_array(ctlr, EDMA_EMR, 0))
424 j = 0;
425 else if (edma_read_array(ctlr, EDMA_EMR, 1))
426 j = 1;
427 if (j >= 0) {
428 dev_dbg(data, "EMR%d %08x\n", j,
429 edma_read_array(ctlr, EDMA_EMR, j));
430 for (i = 0; i < 32; i++) {
431 int k = (j << 5) + i;
432 if (edma_read_array(ctlr, EDMA_EMR, j) &
433 (1 << i)) {
434
435 edma_write_array(ctlr, EDMA_EMCR, j,
436 1 << i);
437
438 edma_shadow0_write_array(ctlr, SH_SECR,
439 j, (1 << i));
440 if (edma_info[ctlr]->intr_data[k].
441 callback) {
442 edma_info[ctlr]->intr_data[k].
443 callback(k,
444 DMA_CC_ERROR,
445 edma_info[ctlr]->intr_data
446 [k].data);
447 }
448 }
449 }
450 } else if (edma_read(ctlr, EDMA_QEMR)) {
451 dev_dbg(data, "QEMR %02x\n",
452 edma_read(ctlr, EDMA_QEMR));
453 for (i = 0; i < 8; i++) {
454 if (edma_read(ctlr, EDMA_QEMR) & (1 << i)) {
455
456 edma_write(ctlr, EDMA_QEMCR, 1 << i);
457 edma_shadow0_write(ctlr, SH_QSECR,
458 (1 << i));
459
460
461 }
462 }
463 } else if (edma_read(ctlr, EDMA_CCERR)) {
464 dev_dbg(data, "CCERR %08x\n",
465 edma_read(ctlr, EDMA_CCERR));
466
467
468
469 for (i = 0; i < 8; i++) {
470 if (edma_read(ctlr, EDMA_CCERR) & (1 << i)) {
471
472 edma_write(ctlr, EDMA_CCERRCLR, 1 << i);
473
474
475 }
476 }
477 }
478 if ((edma_read_array(ctlr, EDMA_EMR, 0) == 0)
479 && (edma_read_array(ctlr, EDMA_EMR, 1) == 0)
480 && (edma_read(ctlr, EDMA_QEMR) == 0)
481 && (edma_read(ctlr, EDMA_CCERR) == 0)) {
482 break;
483 }
484 cnt++;
485 if (cnt > 10)
486 break;
487 }
488 edma_write(ctlr, EDMA_EEVAL, 1);
489 return IRQ_HANDLED;
490}
491
492
493
494
495
496
497
/*
 * Transfer-controller error IRQs are not serviced by this driver;
 * tc_errs_handled gates whether edma_probe() requests them at all.
 */
#define tc_errs_handled false

/* TC0 error interrupt: log only (see tc_errs_handled above). */
static irqreturn_t dma_tc0err_handler(int irq, void *data)
{
	dev_dbg(data, "dma_tc0err_handler\n");
	return IRQ_HANDLED;
}
505
/* TC1 error interrupt: log only (see tc_errs_handled above). */
static irqreturn_t dma_tc1err_handler(int irq, void *data)
{
	dev_dbg(data, "dma_tc1err_handler\n");
	return IRQ_HANDLED;
}
511
512static int reserve_contiguous_params(int ctlr, unsigned int id,
513 unsigned int num_params,
514 unsigned int start_param)
515{
516 int i, j;
517 unsigned int count = num_params;
518
519 for (i = start_param; i < edma_info[ctlr]->num_slots; ++i) {
520 j = EDMA_CHAN_SLOT(i);
521 if (!test_and_set_bit(j, edma_info[ctlr]->edma_inuse))
522 count--;
523 if (count == 0)
524 break;
525 else if (id == EDMA_CONT_PARAMS_FIXED_EXACT)
526 break;
527 else
528 count = num_params;
529 }
530
531
532
533
534
535
536
537 if (count) {
538 for (j = i - num_params + count + 1; j <= i ; ++j)
539 clear_bit(j, edma_info[ctlr]->edma_inuse);
540
541 return -EBUSY;
542 }
543
544 for (j = i - num_params + 1; j <= i; ++j)
545 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(j),
546 &dummy_paramset, PARM_SIZE);
547
548 return EDMA_CTLR_CHAN(ctlr, i - num_params + 1);
549}
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585int edma_alloc_channel(int channel,
586 void (*callback)(unsigned channel, u16 ch_status, void *data),
587 void *data,
588 enum dma_event_q eventq_no)
589{
590 unsigned i, done, ctlr = 0;
591
592 if (channel >= 0) {
593 ctlr = EDMA_CTLR(channel);
594 channel = EDMA_CHAN_SLOT(channel);
595 }
596
597 if (channel < 0) {
598 for (i = 0; i < EDMA_MAX_CC; i++) {
599 channel = 0;
600 for (;;) {
601 channel = find_next_bit(edma_info[i]->
602 edma_noevent,
603 edma_info[i]->num_channels,
604 channel);
605 if (channel == edma_info[i]->num_channels)
606 return -ENOMEM;
607 if (!test_and_set_bit(channel,
608 edma_info[i]->edma_inuse)) {
609 done = 1;
610 ctlr = i;
611 break;
612 }
613 channel++;
614 }
615 if (done)
616 break;
617 }
618 } else if (channel >= edma_info[ctlr]->num_channels) {
619 return -EINVAL;
620 } else if (test_and_set_bit(channel, edma_info[ctlr]->edma_inuse)) {
621 return -EBUSY;
622 }
623
624
625 edma_or_array2(ctlr, EDMA_DRAE, 0, channel >> 5, 1 << (channel & 0x1f));
626
627
628 edma_stop(EDMA_CTLR_CHAN(ctlr, channel));
629 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(channel),
630 &dummy_paramset, PARM_SIZE);
631
632 if (callback)
633 setup_dma_interrupt(EDMA_CTLR_CHAN(ctlr, channel),
634 callback, data);
635
636 map_dmach_queue(ctlr, channel, eventq_no);
637
638 return channel;
639}
640EXPORT_SYMBOL(edma_alloc_channel);
641
642
643
644
645
646
647
648
649
650
651
652
653
654void edma_free_channel(unsigned channel)
655{
656 unsigned ctlr;
657
658 ctlr = EDMA_CTLR(channel);
659 channel = EDMA_CHAN_SLOT(channel);
660
661 if (channel >= edma_info[ctlr]->num_channels)
662 return;
663
664 setup_dma_interrupt(channel, NULL, NULL);
665
666
667 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(channel),
668 &dummy_paramset, PARM_SIZE);
669 clear_bit(channel, edma_info[ctlr]->edma_inuse);
670}
671EXPORT_SYMBOL(edma_free_channel);
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687int edma_alloc_slot(unsigned ctlr, int slot)
688{
689 if (slot >= 0)
690 slot = EDMA_CHAN_SLOT(slot);
691
692 if (slot < 0) {
693 slot = edma_info[ctlr]->num_channels;
694 for (;;) {
695 slot = find_next_zero_bit(edma_info[ctlr]->edma_inuse,
696 edma_info[ctlr]->num_slots, slot);
697 if (slot == edma_info[ctlr]->num_slots)
698 return -ENOMEM;
699 if (!test_and_set_bit(slot,
700 edma_info[ctlr]->edma_inuse))
701 break;
702 }
703 } else if (slot < edma_info[ctlr]->num_channels ||
704 slot >= edma_info[ctlr]->num_slots) {
705 return -EINVAL;
706 } else if (test_and_set_bit(slot, edma_info[ctlr]->edma_inuse)) {
707 return -EBUSY;
708 }
709
710 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
711 &dummy_paramset, PARM_SIZE);
712
713 return EDMA_CTLR_CHAN(ctlr, slot);
714}
715EXPORT_SYMBOL(edma_alloc_slot);
716
717
718
719
720
721
722
723
724
725void edma_free_slot(unsigned slot)
726{
727 unsigned ctlr;
728
729 ctlr = EDMA_CTLR(slot);
730 slot = EDMA_CHAN_SLOT(slot);
731
732 if (slot < edma_info[ctlr]->num_channels ||
733 slot >= edma_info[ctlr]->num_slots)
734 return;
735
736 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
737 &dummy_paramset, PARM_SIZE);
738 clear_bit(slot, edma_info[ctlr]->edma_inuse);
739}
740EXPORT_SYMBOL(edma_free_slot);
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767int edma_alloc_cont_slots(unsigned ctlr, unsigned int id, int slot, int count)
768{
769
770
771
772
773
774 if (slot < edma_info[ctlr]->num_channels ||
775 slot >= edma_info[ctlr]->num_slots)
776 return -EINVAL;
777
778
779
780
781
782
783 if (count < 1 || count >
784 (edma_info[ctlr]->num_slots - edma_info[ctlr]->num_channels))
785 return -EINVAL;
786
787 switch (id) {
788 case EDMA_CONT_PARAMS_ANY:
789 return reserve_contiguous_params(ctlr, id, count,
790 edma_info[ctlr]->num_channels);
791 case EDMA_CONT_PARAMS_FIXED_EXACT:
792 case EDMA_CONT_PARAMS_FIXED_NOT_EXACT:
793 return reserve_contiguous_params(ctlr, id, count, slot);
794 default:
795 return -EINVAL;
796 }
797
798}
799EXPORT_SYMBOL(edma_alloc_cont_slots);
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814int edma_free_cont_slots(unsigned slot, int count)
815{
816 unsigned ctlr;
817 int i;
818
819 ctlr = EDMA_CTLR(slot);
820 slot = EDMA_CHAN_SLOT(slot);
821
822 if (slot < edma_info[ctlr]->num_channels ||
823 slot >= edma_info[ctlr]->num_slots ||
824 count < 1)
825 return -EINVAL;
826
827 for (i = slot; i < slot + count; ++i) {
828 ctlr = EDMA_CTLR(i);
829 slot = EDMA_CHAN_SLOT(i);
830
831 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
832 &dummy_paramset, PARM_SIZE);
833 clear_bit(slot, edma_info[ctlr]->edma_inuse);
834 }
835
836 return 0;
837}
838EXPORT_SYMBOL(edma_free_cont_slots);
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855void edma_set_src(unsigned slot, dma_addr_t src_port,
856 enum address_mode mode, enum fifo_width width)
857{
858 unsigned ctlr;
859
860 ctlr = EDMA_CTLR(slot);
861 slot = EDMA_CHAN_SLOT(slot);
862
863 if (slot < edma_info[ctlr]->num_slots) {
864 unsigned int i = edma_parm_read(ctlr, PARM_OPT, slot);
865
866 if (mode) {
867
868 i = (i & ~(EDMA_FWID)) | (SAM | ((width & 0x7) << 8));
869 } else {
870
871 i &= ~SAM;
872 }
873 edma_parm_write(ctlr, PARM_OPT, slot, i);
874
875
876
877 edma_parm_write(ctlr, PARM_SRC, slot, src_port);
878 }
879}
880EXPORT_SYMBOL(edma_set_src);
881
882
883
884
885
886
887
888
889
890
891
892
893void edma_set_dest(unsigned slot, dma_addr_t dest_port,
894 enum address_mode mode, enum fifo_width width)
895{
896 unsigned ctlr;
897
898 ctlr = EDMA_CTLR(slot);
899 slot = EDMA_CHAN_SLOT(slot);
900
901 if (slot < edma_info[ctlr]->num_slots) {
902 unsigned int i = edma_parm_read(ctlr, PARM_OPT, slot);
903
904 if (mode) {
905
906 i = (i & ~(EDMA_FWID)) | (DAM | ((width & 0x7) << 8));
907 } else {
908
909 i &= ~DAM;
910 }
911 edma_parm_write(ctlr, PARM_OPT, slot, i);
912
913
914 edma_parm_write(ctlr, PARM_DST, slot, dest_port);
915 }
916}
917EXPORT_SYMBOL(edma_set_dest);
918
919
920
921
922
923
924
925
926
927
928void edma_get_position(unsigned slot, dma_addr_t *src, dma_addr_t *dst)
929{
930 struct edmacc_param temp;
931 unsigned ctlr;
932
933 ctlr = EDMA_CTLR(slot);
934 slot = EDMA_CHAN_SLOT(slot);
935
936 edma_read_slot(EDMA_CTLR_CHAN(ctlr, slot), &temp);
937 if (src != NULL)
938 *src = temp.src;
939 if (dst != NULL)
940 *dst = temp.dst;
941}
942EXPORT_SYMBOL(edma_get_position);
943
944
945
946
947
948
949
950
951
952
953
954void edma_set_src_index(unsigned slot, s16 src_bidx, s16 src_cidx)
955{
956 unsigned ctlr;
957
958 ctlr = EDMA_CTLR(slot);
959 slot = EDMA_CHAN_SLOT(slot);
960
961 if (slot < edma_info[ctlr]->num_slots) {
962 edma_parm_modify(ctlr, PARM_SRC_DST_BIDX, slot,
963 0xffff0000, src_bidx);
964 edma_parm_modify(ctlr, PARM_SRC_DST_CIDX, slot,
965 0xffff0000, src_cidx);
966 }
967}
968EXPORT_SYMBOL(edma_set_src_index);
969
970
971
972
973
974
975
976
977
978
979
980void edma_set_dest_index(unsigned slot, s16 dest_bidx, s16 dest_cidx)
981{
982 unsigned ctlr;
983
984 ctlr = EDMA_CTLR(slot);
985 slot = EDMA_CHAN_SLOT(slot);
986
987 if (slot < edma_info[ctlr]->num_slots) {
988 edma_parm_modify(ctlr, PARM_SRC_DST_BIDX, slot,
989 0x0000ffff, dest_bidx << 16);
990 edma_parm_modify(ctlr, PARM_SRC_DST_CIDX, slot,
991 0x0000ffff, dest_cidx << 16);
992 }
993}
994EXPORT_SYMBOL(edma_set_dest_index);
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
/**
 * edma_set_transfer_params - program counts, reload and sync mode of a slot
 * @slot: controller+slot handle
 * @acnt: bytes per array (first dimension)
 * @bcnt: arrays per frame (second dimension)
 * @ccnt: frames per transfer (third dimension)
 * @bcnt_rld: BCNT reload value (high halfword of LINK_BCNTRLD)
 * @sync_mode: ASYNC clears SYNCDIM in OPT; otherwise SYNCDIM is set
 */
void edma_set_transfer_params(unsigned slot,
		u16 acnt, u16 bcnt, u16 ccnt,
		u16 bcnt_rld, enum sync_dimension sync_mode)
{
	unsigned ctlr;

	ctlr = EDMA_CTLR(slot);
	slot = EDMA_CHAN_SLOT(slot);

	if (slot < edma_info[ctlr]->num_slots) {
		/* BCNTRLD occupies the upper halfword of LINK_BCNTRLD */
		edma_parm_modify(ctlr, PARM_LINK_BCNTRLD, slot,
				0x0000ffff, bcnt_rld << 16);
		if (sync_mode == ASYNC)
			edma_parm_and(ctlr, PARM_OPT, slot, ~SYNCDIM);
		else
			edma_parm_or(ctlr, PARM_OPT, slot, SYNCDIM);
		/* A_B_CNT packs bcnt in the high and acnt in the low half */
		edma_parm_write(ctlr, PARM_A_B_CNT, slot, (bcnt << 16) | acnt);
		edma_parm_write(ctlr, PARM_CCNT, slot, ccnt);
	}
}
EXPORT_SYMBOL(edma_set_transfer_params);
1047
1048
1049
1050
1051
1052
1053
1054
1055void edma_link(unsigned from, unsigned to)
1056{
1057 unsigned ctlr_from, ctlr_to;
1058
1059 ctlr_from = EDMA_CTLR(from);
1060 from = EDMA_CHAN_SLOT(from);
1061 ctlr_to = EDMA_CTLR(to);
1062 to = EDMA_CHAN_SLOT(to);
1063
1064 if (from >= edma_info[ctlr_from]->num_slots)
1065 return;
1066 if (to >= edma_info[ctlr_to]->num_slots)
1067 return;
1068 edma_parm_modify(ctlr_from, PARM_LINK_BCNTRLD, from, 0xffff0000,
1069 PARM_OFFSET(to));
1070}
1071EXPORT_SYMBOL(edma_link);
1072
1073
1074
1075
1076
1077
1078
1079
1080void edma_unlink(unsigned from)
1081{
1082 unsigned ctlr;
1083
1084 ctlr = EDMA_CTLR(from);
1085 from = EDMA_CHAN_SLOT(from);
1086
1087 if (from >= edma_info[ctlr]->num_slots)
1088 return;
1089 edma_parm_or(ctlr, PARM_LINK_BCNTRLD, from, 0xffff);
1090}
1091EXPORT_SYMBOL(edma_unlink);
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107void edma_write_slot(unsigned slot, const struct edmacc_param *param)
1108{
1109 unsigned ctlr;
1110
1111 ctlr = EDMA_CTLR(slot);
1112 slot = EDMA_CHAN_SLOT(slot);
1113
1114 if (slot >= edma_info[ctlr]->num_slots)
1115 return;
1116 memcpy_toio(edmacc_regs_base[ctlr] + PARM_OFFSET(slot), param,
1117 PARM_SIZE);
1118}
1119EXPORT_SYMBOL(edma_write_slot);
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129void edma_read_slot(unsigned slot, struct edmacc_param *param)
1130{
1131 unsigned ctlr;
1132
1133 ctlr = EDMA_CTLR(slot);
1134 slot = EDMA_CHAN_SLOT(slot);
1135
1136 if (slot >= edma_info[ctlr]->num_slots)
1137 return;
1138 memcpy_fromio(param, edmacc_regs_base[ctlr] + PARM_OFFSET(slot),
1139 PARM_SIZE);
1140}
1141EXPORT_SYMBOL(edma_read_slot);
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154void edma_pause(unsigned channel)
1155{
1156 unsigned ctlr;
1157
1158 ctlr = EDMA_CTLR(channel);
1159 channel = EDMA_CHAN_SLOT(channel);
1160
1161 if (channel < edma_info[ctlr]->num_channels) {
1162 unsigned int mask = (1 << (channel & 0x1f));
1163
1164 edma_shadow0_write_array(ctlr, SH_EECR, channel >> 5, mask);
1165 }
1166}
1167EXPORT_SYMBOL(edma_pause);
1168
1169
1170
1171
1172
1173
1174
1175void edma_resume(unsigned channel)
1176{
1177 unsigned ctlr;
1178
1179 ctlr = EDMA_CTLR(channel);
1180 channel = EDMA_CHAN_SLOT(channel);
1181
1182 if (channel < edma_info[ctlr]->num_channels) {
1183 unsigned int mask = (1 << (channel & 0x1f));
1184
1185 edma_shadow0_write_array(ctlr, SH_EESR, channel >> 5, mask);
1186 }
1187}
1188EXPORT_SYMBOL(edma_resume);
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
/**
 * edma_start - start DMA on channel @channel
 * @channel: controller+channel handle
 *
 * Channels without a hardware event are triggered manually via ESR;
 * event-driven channels get any stale error/secondary status cleared
 * and then have event forwarding enabled (EESR).
 *
 * Returns 0, or -EINVAL for an out-of-range channel.
 */
int edma_start(unsigned channel)
{
	unsigned ctlr;

	ctlr = EDMA_CTLR(channel);
	channel = EDMA_CHAN_SLOT(channel);

	if (channel < edma_info[ctlr]->num_channels) {
		int j = channel >> 5;
		unsigned int mask = (1 << (channel & 0x1f));

		/* EDMA channel with no event association: manual trigger */
		if (test_bit(channel, edma_info[ctlr]->edma_noevent)) {
			pr_debug("EDMA: ESR%d %08x\n", j,
				edma_shadow0_read_array(ctlr, SH_ESR, j));
			edma_shadow0_write_array(ctlr, SH_ESR, j, mask);
			return 0;
		}

		/* EDMA channel with event association */
		pr_debug("EDMA: ER%d %08x\n", j,
			edma_shadow0_read_array(ctlr, SH_ER, j));
		/* Clear any pending missed-event error */
		edma_write_array(ctlr, EDMA_EMCR, j, mask);
		/* Clear any secondary event status */
		edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
		/* Enable hardware event forwarding */
		edma_shadow0_write_array(ctlr, SH_EESR, j, mask);
		pr_debug("EDMA: EER%d %08x\n", j,
			edma_shadow0_read_array(ctlr, SH_EER, j));
		return 0;
	}

	return -EINVAL;
}
EXPORT_SYMBOL(edma_start);
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
/**
 * edma_stop - stop DMA on channel @channel and clear all of its pending
 *	state: event forwarding (EECR), pending event (ECR), secondary
 *	status (SECR) and missed-event error (EMCR)
 * @channel: controller+channel handle
 *
 * The write order matters on live hardware: forwarding is disabled first
 * so no new event can slip in while the pending state is being cleared.
 */
void edma_stop(unsigned channel)
{
	unsigned ctlr;

	ctlr = EDMA_CTLR(channel);
	channel = EDMA_CHAN_SLOT(channel);

	if (channel < edma_info[ctlr]->num_channels) {
		int j = channel >> 5;
		unsigned int mask = (1 << (channel & 0x1f));

		edma_shadow0_write_array(ctlr, SH_EECR, j, mask);
		edma_shadow0_write_array(ctlr, SH_ECR, j, mask);
		edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
		edma_write_array(ctlr, EDMA_EMCR, j, mask);

		pr_debug("EDMA: EER%d %08x\n", j,
				edma_shadow0_read_array(ctlr, SH_EER, j));

		/* REVISIT:  consider guarding against inappropriate event
		 * chaining by overwriting with dummy_paramset.
		 */
	}
}
EXPORT_SYMBOL(edma_stop);
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
1282
1283
1284
/**
 * edma_clean_channel - bring a channel back to a usable state after an
 *	error: cancel the pending event, clear its missed-event and
 *	secondary status, and clear CC-level error status
 * @channel: controller+channel handle
 */
void edma_clean_channel(unsigned channel)
{
	unsigned ctlr;

	ctlr = EDMA_CTLR(channel);
	channel = EDMA_CHAN_SLOT(channel);

	if (channel < edma_info[ctlr]->num_channels) {
		int j = (channel >> 5);
		unsigned int mask = 1 << (channel & 0x1f);

		pr_debug("EDMA: EMR%d %08x\n", j,
				edma_read_array(ctlr, EDMA_EMR, j));
		edma_shadow0_write_array(ctlr, SH_ECR, j, mask);
		/* Clear the corresponding EMR bit */
		edma_write_array(ctlr, EDMA_EMCR, j, mask);
		/* Clear any SER */
		edma_shadow0_write_array(ctlr, SH_SECR, j, mask);
		/* Clear CC error status bits — presumably bit 16 plus the
		 * low queue-error bits; verify mask against the EDMA3 TRM */
		edma_write(ctlr, EDMA_CCERRCLR, (1 << 16) | 0x3);
	}
}
EXPORT_SYMBOL(edma_clean_channel);
1307
1308
1309
1310
1311
1312
1313void edma_clear_event(unsigned channel)
1314{
1315 unsigned ctlr;
1316
1317 ctlr = EDMA_CTLR(channel);
1318 channel = EDMA_CHAN_SLOT(channel);
1319
1320 if (channel >= edma_info[ctlr]->num_channels)
1321 return;
1322 if (channel < 32)
1323 edma_write(ctlr, EDMA_ECR, 1 << channel);
1324 else
1325 edma_write(ctlr, EDMA_ECRH, 1 << (channel - 32));
1326}
1327EXPORT_SYMBOL(edma_clear_event);
1328
1329
1330
/*
 * Probe the "edma" platform device: for each channel controller (CC)
 * present in the platform data, map its register space, allocate per-CC
 * state, install the completion and error IRQ handlers, and program the
 * default event-queue / transfer-controller mappings.
 *
 * Returns 0 on success or a negative errno; on failure everything
 * acquired so far (IRQs, mappings, memory) is released.
 */
static int __init edma_probe(struct platform_device *pdev)
{
	struct edma_soc_info *info = pdev->dev.platform_data;
	const s8 (*queue_priority_mapping)[2];
	const s8 (*queue_tc_mapping)[2];
	int i, j, found = 0;
	int status = -1;
	const s8 *noevent;
	int irq[EDMA_MAX_CC] = {0, 0};
	int err_irq[EDMA_MAX_CC] = {0, 0};
	struct resource *r[EDMA_MAX_CC] = {NULL};
	resource_size_t len[EDMA_MAX_CC];
	char res_name[10];
	char irq_name[10];

	if (!info)
		return -ENODEV;

	for (j = 0; j < EDMA_MAX_CC; j++) {
		sprintf(res_name, "edma_cc%d", j);
		r[j] = platform_get_resource_byname(pdev, IORESOURCE_MEM,
						res_name);
		/* a missing second CC is fine; no CC at all is fatal */
		if (!r[j]) {
			if (found)
				break;
			else
				return -ENODEV;
		} else
			found = 1;

		len[j] = resource_size(r[j]);

		r[j] = request_mem_region(r[j]->start, len[j],
			dev_name(&pdev->dev));
		if (!r[j]) {
			status = -EBUSY;
			goto fail1;
		}

		edmacc_regs_base[j] = ioremap(r[j]->start, len[j]);
		if (!edmacc_regs_base[j]) {
			status = -EBUSY;
			goto fail1;
		}

		edma_info[j] = kmalloc(sizeof(struct edma), GFP_KERNEL);
		if (!edma_info[j]) {
			status = -ENOMEM;
			goto fail1;
		}
		memset(edma_info[j], 0, sizeof(struct edma));

		/* clamp platform data against the driver's static limits */
		edma_info[j]->num_channels = min_t(unsigned, info[j].n_channel,
							EDMA_MAX_DMACH);
		edma_info[j]->num_slots = min_t(unsigned, info[j].n_slot,
							EDMA_MAX_PARAMENTRY);
		edma_info[j]->num_cc = min_t(unsigned, info[j].n_cc,
						EDMA_MAX_CC);

		edma_info[j]->default_queue = info[j].default_queue;
		if (!edma_info[j]->default_queue)
			edma_info[j]->default_queue = EVENTQ_1;

		dev_dbg(&pdev->dev, "DMA REG BASE ADDR=%p\n",
			edmacc_regs_base[j]);

		/* start every PaRAM slot out as a harmless dummy transfer */
		for (i = 0; i < edma_info[j]->num_slots; i++)
			memcpy_toio(edmacc_regs_base[j] + PARM_OFFSET(i),
					&dummy_paramset, PARM_SIZE);

		/* record channels with no event trigger; list ends at -1 */
		noevent = info[j].noevent;
		if (noevent) {
			while (*noevent != -1)
				set_bit(*noevent++, edma_info[j]->edma_noevent);
		}

		sprintf(irq_name, "edma%d", j);
		irq[j] = platform_get_irq_byname(pdev, irq_name);
		/* NOTE(review): irq[j] is not checked for < 0 before
		 * request_irq(); confirm platform data always supplies it */
		edma_info[j]->irq_res_start = irq[j];
		status = request_irq(irq[j], dma_irq_handler, 0, "edma",
					&pdev->dev);
		if (status < 0) {
			dev_dbg(&pdev->dev, "request_irq %d failed --> %d\n",
				irq[j], status);
			goto fail;
		}

		sprintf(irq_name, "edma%d_err", j);
		err_irq[j] = platform_get_irq_byname(pdev, irq_name);
		edma_info[j]->irq_res_end = err_irq[j];
		status = request_irq(err_irq[j], dma_ccerr_handler, 0,
					"edma_error", &pdev->dev);
		if (status < 0) {
			dev_dbg(&pdev->dev, "request_irq %d failed --> %d\n",
				err_irq[j], status);
			goto fail;
		}

		/* route every channel to EVENTQ_1 until a client remaps it */
		for (i = 0; i < edma_info[j]->num_channels; i++)
			map_dmach_queue(j, i, EVENTQ_1);

		queue_tc_mapping = info[j].queue_tc_mapping;
		queue_priority_mapping = info[j].queue_priority_mapping;

		/* event queue -> TC mapping; table terminated by -1 */
		for (i = 0; queue_tc_mapping[i][0] != -1; i++)
			map_queue_tc(j, queue_tc_mapping[i][0],
					queue_tc_mapping[i][1]);

		/* event queue priorities; table terminated by -1 */
		for (i = 0; queue_priority_mapping[i][0] != -1; i++)
			assign_priority_to_queue(j,
						queue_priority_mapping[i][0],
						queue_priority_mapping[i][1]);

		/* identity-map channels to PaRAM entries when the CC
		 * supports channel mapping (CCCFG.CHMAP_EXIST) */
		if (edma_read(j, EDMA_CCCFG) & CHMAP_EXIST)
			map_dmach_param(j);

		/* clear all region access enables; enabled per-channel at
		 * allocation time */
		for (i = 0; i < info[j].n_region; i++) {
			edma_write_array2(j, EDMA_DRAE, i, 0, 0x0);
			edma_write_array2(j, EDMA_DRAE, i, 1, 0x0);
			edma_write_array(j, EDMA_QRAE, i, 0x0);
		}
	}

	/* tc_errs_handled is compile-time false; TC error IRQs unused */
	if (tc_errs_handled) {
		status = request_irq(IRQ_TCERRINT0, dma_tc0err_handler, 0,
					"edma_tc0", &pdev->dev);
		if (status < 0) {
			dev_dbg(&pdev->dev, "request_irq %d failed --> %d\n",
				IRQ_TCERRINT0, status);
			return status;
		}
		status = request_irq(IRQ_TCERRINT, dma_tc1err_handler, 0,
					"edma_tc1", &pdev->dev);
		if (status < 0) {
			dev_dbg(&pdev->dev, "request_irq %d --> %d\n",
				IRQ_TCERRINT, status);
			return status;
		}
	}

	return 0;

fail:
	/* entries never requested are still 0 and are skipped */
	for (i = 0; i < EDMA_MAX_CC; i++) {
		if (err_irq[i])
			free_irq(err_irq[i], &pdev->dev);
		if (irq[i])
			free_irq(irq[i], &pdev->dev);
	}
fail1:
	for (i = 0; i < EDMA_MAX_CC; i++) {
		if (r[i])
			release_mem_region(r[i]->start, len[i]);
		if (edmacc_regs_base[i])
			iounmap(edmacc_regs_base[i]);
		kfree(edma_info[i]);
	}
	return status;
}
1499
1500
/* Bound once via platform_driver_probe(); no remove/hotplug support. */
static struct platform_driver edma_driver = {
	.driver.name = "edma",
};
1504
/*
 * Registered at arch_initcall time so the EDMA core is available before
 * the device drivers that allocate channels from it.
 */
static int __init edma_init(void)
{
	return platform_driver_probe(&edma_driver, edma_probe);
}
arch_initcall(edma_init);
1510
1511