1
2
3
4
5
6
7
8
9
10
11#include <asm/cache.h>
12#include <asm/io.h>
13#include <common.h>
14#include <dm.h>
15#include <dma-uclass.h>
16#include <linux/dma-mapping.h>
17#include <asm/omap_common.h>
18#include <asm/ti-common/ti-edma3.h>
19
20#define EDMA3_SL_BASE(slot) (0x4000 + ((slot) << 5))
21#define EDMA3_SL_MAX_NUM 512
22#define EDMA3_SLOPT_FIFO_WIDTH_MASK (0x7 << 8)
23
24#define EDMA3_QCHMAP(ch) 0x0200 + ((ch) << 2)
25#define EDMA3_CHMAP_PARSET_MASK 0x1ff
26#define EDMA3_CHMAP_PARSET_SHIFT 0x5
27#define EDMA3_CHMAP_TRIGWORD_SHIFT 0x2
28
29#define EDMA3_QEMCR 0x314
30#define EDMA3_IPR 0x1068
31#define EDMA3_IPRH 0x106c
32#define EDMA3_ICR 0x1070
33#define EDMA3_ICRH 0x1074
34#define EDMA3_QEECR 0x1088
35#define EDMA3_QEESR 0x108c
36#define EDMA3_QSECR 0x1094
37
38#define EDMA_FILL_BUFFER_SIZE 512
39
40struct ti_edma3_priv {
41 u32 base;
42};
43
44static u8 edma_fill_buffer[EDMA_FILL_BUFFER_SIZE] __aligned(ARCH_DMA_MINALIGN);
45
46
47
48
49
50
51
52
53
54
55
56void qedma3_start(u32 base, struct edma3_channel_config *cfg)
57{
58 u32 qchmap;
59
60
61 if (cfg->complete_code < 32)
62 __raw_writel(1 << cfg->complete_code, base + EDMA3_ICR);
63 else
64 __raw_writel(1 << cfg->complete_code, base + EDMA3_ICRH);
65
66
67 qchmap = ((EDMA3_CHMAP_PARSET_MASK & cfg->slot)
68 << EDMA3_CHMAP_PARSET_SHIFT) |
69 (cfg->trigger_slot_word << EDMA3_CHMAP_TRIGWORD_SHIFT);
70
71 __raw_writel(qchmap, base + EDMA3_QCHMAP(cfg->chnum));
72
73
74 __raw_writel(1 << cfg->chnum, base + EDMA3_QSECR);
75 __raw_writel(1 << cfg->chnum, base + EDMA3_QEMCR);
76
77
78 __raw_writel(1 << cfg->chnum, base + EDMA3_QEESR);
79}
80
81
82
83
84
85
86
87
88
89
90
91
92
93void edma3_set_dest(u32 base, int slot, u32 dst, enum edma3_address_mode mode,
94 enum edma3_fifo_width width)
95{
96 u32 opt;
97 struct edma3_slot_layout *rg;
98
99 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
100
101 opt = __raw_readl(&rg->opt);
102 if (mode == FIFO)
103 opt = (opt & EDMA3_SLOPT_FIFO_WIDTH_MASK) |
104 (EDMA3_SLOPT_DST_ADDR_CONST_MODE |
105 EDMA3_SLOPT_FIFO_WIDTH_SET(width));
106 else
107 opt &= ~EDMA3_SLOPT_DST_ADDR_CONST_MODE;
108
109 __raw_writel(opt, &rg->opt);
110 __raw_writel(dst, &rg->dst);
111}
112
113
114
115
116
117
118
119
120
121
122
123
124void edma3_set_dest_index(u32 base, unsigned slot, int bidx, int cidx)
125{
126 u32 src_dst_bidx;
127 u32 src_dst_cidx;
128 struct edma3_slot_layout *rg;
129
130 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
131
132 src_dst_bidx = __raw_readl(&rg->src_dst_bidx);
133 src_dst_cidx = __raw_readl(&rg->src_dst_cidx);
134
135 __raw_writel((src_dst_bidx & 0x0000ffff) | (bidx << 16),
136 &rg->src_dst_bidx);
137 __raw_writel((src_dst_cidx & 0x0000ffff) | (cidx << 16),
138 &rg->src_dst_cidx);
139}
140
141
142
143
144void edma3_set_dest_addr(u32 base, int slot, u32 dst)
145{
146 struct edma3_slot_layout *rg;
147
148 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
149 __raw_writel(dst, &rg->dst);
150}
151
152
153
154
155
156
157
158
159
160
161
162
163
164void edma3_set_src(u32 base, int slot, u32 src, enum edma3_address_mode mode,
165 enum edma3_fifo_width width)
166{
167 u32 opt;
168 struct edma3_slot_layout *rg;
169
170 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
171
172 opt = __raw_readl(&rg->opt);
173 if (mode == FIFO)
174 opt = (opt & EDMA3_SLOPT_FIFO_WIDTH_MASK) |
175 (EDMA3_SLOPT_DST_ADDR_CONST_MODE |
176 EDMA3_SLOPT_FIFO_WIDTH_SET(width));
177 else
178 opt &= ~EDMA3_SLOPT_DST_ADDR_CONST_MODE;
179
180 __raw_writel(opt, &rg->opt);
181 __raw_writel(src, &rg->src);
182}
183
184
185
186
187
188
189
190
191
192
193
194
195void edma3_set_src_index(u32 base, unsigned slot, int bidx, int cidx)
196{
197 u32 src_dst_bidx;
198 u32 src_dst_cidx;
199 struct edma3_slot_layout *rg;
200
201 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
202
203 src_dst_bidx = __raw_readl(&rg->src_dst_bidx);
204 src_dst_cidx = __raw_readl(&rg->src_dst_cidx);
205
206 __raw_writel((src_dst_bidx & 0xffff0000) | bidx,
207 &rg->src_dst_bidx);
208 __raw_writel((src_dst_cidx & 0xffff0000) | cidx,
209 &rg->src_dst_cidx);
210}
211
212
213
214
215void edma3_set_src_addr(u32 base, int slot, u32 src)
216{
217 struct edma3_slot_layout *rg;
218
219 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
220 __raw_writel(src, &rg->src);
221}
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253void edma3_set_transfer_params(u32 base, int slot, int acnt,
254 int bcnt, int ccnt, u16 bcnt_rld,
255 enum edma3_sync_dimension sync_mode)
256{
257 u32 opt;
258 u32 link_bcntrld;
259 struct edma3_slot_layout *rg;
260
261 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
262
263 link_bcntrld = __raw_readl(&rg->link_bcntrld);
264
265 __raw_writel((bcnt_rld << 16) | (0x0000ffff & link_bcntrld),
266 &rg->link_bcntrld);
267
268 opt = __raw_readl(&rg->opt);
269 if (sync_mode == ASYNC)
270 __raw_writel(opt & ~EDMA3_SLOPT_AB_SYNC, &rg->opt);
271 else
272 __raw_writel(opt | EDMA3_SLOPT_AB_SYNC, &rg->opt);
273
274
275 __raw_writel((bcnt << 16) | (acnt & 0xffff), &rg->a_b_cnt);
276 __raw_writel(0xffff & ccnt, &rg->ccnt);
277}
278
279
280
281
282
283
284
285
286
287
288
289
290void edma3_write_slot(u32 base, int slot, struct edma3_slot_layout *param)
291{
292 int i;
293 u32 *p = (u32 *)param;
294 u32 *addr = (u32 *)(base + EDMA3_SL_BASE(slot));
295
296 for (i = 0; i < sizeof(struct edma3_slot_layout)/4; i += 4)
297 __raw_writel(*p++, addr++);
298}
299
300
301
302
303
304
305
306
307
308
309void edma3_read_slot(u32 base, int slot, struct edma3_slot_layout *param)
310{
311 int i;
312 u32 *p = (u32 *)param;
313 u32 *addr = (u32 *)(base + EDMA3_SL_BASE(slot));
314
315 for (i = 0; i < sizeof(struct edma3_slot_layout)/4; i += 4)
316 *p++ = __raw_readl(addr++);
317}
318
319void edma3_slot_configure(u32 base, int slot, struct edma3_slot_config *cfg)
320{
321 struct edma3_slot_layout *rg;
322
323 rg = (struct edma3_slot_layout *)(base + EDMA3_SL_BASE(slot));
324
325 __raw_writel(cfg->opt, &rg->opt);
326 __raw_writel(cfg->src, &rg->src);
327 __raw_writel((cfg->bcnt << 16) | (cfg->acnt & 0xffff), &rg->a_b_cnt);
328 __raw_writel(cfg->dst, &rg->dst);
329 __raw_writel((cfg->dst_bidx << 16) |
330 (cfg->src_bidx & 0xffff), &rg->src_dst_bidx);
331 __raw_writel((cfg->bcntrld << 16) |
332 (cfg->link & 0xffff), &rg->link_bcntrld);
333 __raw_writel((cfg->dst_cidx << 16) |
334 (cfg->src_cidx & 0xffff), &rg->src_dst_cidx);
335 __raw_writel(0xffff & cfg->ccnt, &rg->ccnt);
336}
337
338
339
340
341
342
343
344
345
346
347int edma3_check_for_transfer(u32 base, struct edma3_channel_config *cfg)
348{
349 u32 inum;
350 u32 ipr_base;
351 u32 icr_base;
352
353 if (cfg->complete_code < 32) {
354 ipr_base = base + EDMA3_IPR;
355 icr_base = base + EDMA3_ICR;
356 inum = 1 << cfg->complete_code;
357 } else {
358 ipr_base = base + EDMA3_IPRH;
359 icr_base = base + EDMA3_ICRH;
360 inum = 1 << (cfg->complete_code - 32);
361 }
362
363
364 if (!(__raw_readl(ipr_base) & inum))
365 return 1;
366
367
368 __raw_writel(inum, icr_base);
369
370 return 0;
371}
372
373
374
375
376
377
378
379void qedma3_stop(u32 base, struct edma3_channel_config *cfg)
380{
381
382 __raw_writel(1 << cfg->chnum, base + EDMA3_QEECR);
383
384
385 if (cfg->complete_code < 32)
386 __raw_writel(1 << cfg->complete_code, base + EDMA3_ICR);
387 else
388 __raw_writel(1 << cfg->complete_code, base + EDMA3_ICRH);
389
390
391 __raw_writel(1 << cfg->chnum, base + EDMA3_QSECR);
392 __raw_writel(1 << cfg->chnum, base + EDMA3_QEMCR);
393
394
395 __raw_writel(0, base + EDMA3_QCHMAP(cfg->chnum));
396}
397
/*
 * __edma3_transfer() - move 'len' bytes from 'src' to 'dst' using QDMA
 * channel 0 (completion code 0) and PaRAM slot 'edma_slot_num'.
 *
 * The length is split into ACNT-byte bursts repeated BCNT times:
 *  - len > s_len: ACNT = s_len (fill case — the s_len-byte source block
 *    is replayed because src_bidx is set to 0 below);
 *  - otherwise: ACNT is capped at max_acnt (0x7FFF).
 * A remainder that does not fit the ACNT x BCNT grid is moved by a
 * second, smaller transfer afterwards.
 *
 * Both transfers busy-wait on edma3_check_for_transfer(); the function
 * returns only when the DMA has completed.
 */
void __edma3_transfer(unsigned long edma3_base_addr, unsigned int edma_slot_num,
		      dma_addr_t dst, dma_addr_t src, size_t len, size_t s_len)
{
	struct edma3_slot_config slot;
	struct edma3_channel_config edma_channel;
	int b_cnt_value = 1;
	int rem_bytes = 0;
	int a_cnt_value = len;
	unsigned int addr = (unsigned int) (dst);
	unsigned int max_acnt = 0x7FFFU;

	if (len > s_len) {
		/* Fill path: replay the s_len-byte source block BCNT times */
		b_cnt_value = (len / s_len);
		rem_bytes = (len % s_len);
		a_cnt_value = s_len;
	} else if (len > max_acnt) {
		/* Plain copy too large for a single ACNT burst */
		b_cnt_value = (len / max_acnt);
		rem_bytes = (len % max_acnt);
		a_cnt_value = max_acnt;
	}

	/* Program the PaRAM set for the main ACNT x BCNT transfer */
	slot.opt = 0;
	slot.src = ((unsigned int) src);
	slot.acnt = a_cnt_value;
	slot.bcnt = b_cnt_value;
	slot.ccnt = 1;
	/*
	 * Copy path (len == s_len): advance the source each burst.
	 * Fill path: src_bidx = 0 re-reads the same source block.
	 */
	if (len == s_len)
		slot.src_bidx = a_cnt_value;
	else
		slot.src_bidx = 0;
	slot.dst_bidx = a_cnt_value;
	slot.src_cidx = 0;
	slot.dst_cidx = 0;
	slot.link = EDMA3_PARSET_NULL_LINK;
	slot.bcntrld = 0;
	slot.opt = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
		   EDMA3_SLOPT_COMP_CODE(0) |
		   EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;

	edma3_slot_configure(edma3_base_addr, edma_slot_num, &slot);
	edma_channel.slot = edma_slot_num;
	edma_channel.chnum = 0;
	edma_channel.complete_code = 0;
	/* Trigger word within the PaRAM set, derived from the destination */
	edma_channel.trigger_slot_word = EDMA3_TWORD(dst);

	qedma3_start(edma3_base_addr, &edma_channel);
	/* Writing the trigger word (dst) starts the QDMA transfer */
	edma3_set_dest_addr(edma3_base_addr, edma_channel.slot, addr);

	/* Busy-wait for completion */
	while (edma3_check_for_transfer(edma3_base_addr, &edma_channel))
		;
	qedma3_stop(edma3_base_addr, &edma_channel);

	if (rem_bytes != 0) {
		/* Second transfer for the bytes left over from the grid */
		slot.opt = 0;
		/*
		 * Copy path resumes after the bytes already copied; fill
		 * path restarts at the source block's beginning.
		 */
		if (len == s_len)
			slot.src =
				(b_cnt_value * max_acnt) + ((unsigned int) src);
		else
			slot.src = (unsigned int) src;
		slot.acnt = rem_bytes;
		slot.bcnt = 1;
		slot.ccnt = 1;
		slot.src_bidx = rem_bytes;
		slot.dst_bidx = rem_bytes;
		slot.src_cidx = 0;
		slot.dst_cidx = 0;
		slot.link = EDMA3_PARSET_NULL_LINK;
		slot.bcntrld = 0;
		slot.opt = EDMA3_SLOPT_TRANS_COMP_INT_ENB |
			   EDMA3_SLOPT_COMP_CODE(0) |
			   EDMA3_SLOPT_STATIC | EDMA3_SLOPT_AB_SYNC;
		edma3_slot_configure(edma3_base_addr, edma_slot_num, &slot);
		edma_channel.slot = edma_slot_num;
		edma_channel.chnum = 0;
		edma_channel.complete_code = 0;

		edma_channel.trigger_slot_word = EDMA3_TWORD(dst);

		qedma3_start(edma3_base_addr, &edma_channel);
		/*
		 * NOTE(review): the offset uses max_acnt even when the main
		 * transfer was chunked by s_len (len > s_len path, where
		 * a_cnt_value == s_len).  That looks like it lands the
		 * remainder at the wrong destination when s_len != max_acnt
		 * and len is not a multiple of s_len — confirm against the
		 * callers (__edma3_fill passes s_len == 512).
		 */
		edma3_set_dest_addr(edma3_base_addr, edma_channel.slot, addr +
				    (max_acnt * b_cnt_value));
		while (edma3_check_for_transfer(edma3_base_addr, &edma_channel))
			;
		qedma3_stop(edma3_base_addr, &edma_channel);
	}
}
485
486void __edma3_fill(unsigned long edma3_base_addr, unsigned int edma_slot_num,
487 dma_addr_t dst, u8 val, size_t len)
488{
489 int xfer_len;
490 int max_xfer = EDMA_FILL_BUFFER_SIZE * 65535;
491 dma_addr_t source;
492
493 memset((void *)edma_fill_buffer, val, sizeof(edma_fill_buffer));
494 source = dma_map_single(edma_fill_buffer, len, DMA_TO_DEVICE);
495
496 while (len) {
497 xfer_len = len;
498 if (xfer_len > max_xfer)
499 xfer_len = max_xfer;
500
501 __edma3_transfer(edma3_base_addr, edma_slot_num, dst,
502 source, xfer_len,
503 EDMA_FILL_BUFFER_SIZE);
504 len -= xfer_len;
505 dst += xfer_len;
506 }
507
508 dma_unmap_single(source, len, DMA_FROM_DEVICE);
509}
510
511#ifndef CONFIG_DMA
512
513void edma3_transfer(unsigned long edma3_base_addr, unsigned int edma_slot_num,
514 void *dst, void *src, size_t len)
515{
516
517 dma_addr_t destination = dma_map_single(dst, len, DMA_FROM_DEVICE);
518 dma_addr_t source = dma_map_single(src, len, DMA_TO_DEVICE);
519
520 __edma3_transfer(edma3_base_addr, edma_slot_num, destination, source, len, len);
521
522
523 dma_unmap_single(destination, len, DMA_FROM_DEVICE);
524 dma_unmap_single(source, len, DMA_TO_DEVICE);
525}
526
527void edma3_fill(unsigned long edma3_base_addr, unsigned int edma_slot_num,
528 void *dst, u8 val, size_t len)
529{
530
531 dma_addr_t destination = dma_map_single(dst, len, DMA_FROM_DEVICE);
532
533 __edma3_fill(edma3_base_addr, edma_slot_num, destination, val, len);
534
535
536 dma_unmap_single(destination, len, DMA_FROM_DEVICE);
537}
538
539#else
540
541static int ti_edma3_transfer(struct udevice *dev, int direction,
542 dma_addr_t dst, dma_addr_t src, size_t len)
543{
544 struct ti_edma3_priv *priv = dev_get_priv(dev);
545
546
547 enable_edma3_clocks();
548
549 switch (direction) {
550 case DMA_MEM_TO_MEM:
551 __edma3_transfer(priv->base, 1, dst, src, len, len);
552 break;
553 default:
554 pr_err("Transfer type not implemented in DMA driver\n");
555 break;
556 }
557
558
559 disable_edma3_clocks();
560
561 return 0;
562}
563
564static int ti_edma3_of_to_plat(struct udevice *dev)
565{
566 struct ti_edma3_priv *priv = dev_get_priv(dev);
567
568 priv->base = dev_read_addr(dev);
569
570 return 0;
571}
572
573static int ti_edma3_probe(struct udevice *dev)
574{
575 struct dma_dev_priv *uc_priv = dev_get_uclass_priv(dev);
576
577 uc_priv->supported = DMA_SUPPORTS_MEM_TO_MEM;
578
579 return 0;
580}
581
/* DMA uclass operations: only .transfer is implemented */
static const struct dma_ops ti_edma3_ops = {
	.transfer = ti_edma3_transfer,
};

/* Device-tree match table */
static const struct udevice_id ti_edma3_ids[] = {
	{ .compatible = "ti,edma3" },
	{ }
};

/* Driver-model registration for the TI EDMA3 memory-to-memory DMA */
U_BOOT_DRIVER(ti_edma3) = {
	.name = "ti_edma3",
	.id = UCLASS_DMA,
	.of_match = ti_edma3_ids,
	.ops = &ti_edma3_ops,
	.of_to_plat = ti_edma3_of_to_plat,
	.probe = ti_edma3_probe,
	.priv_auto = sizeof(struct ti_edma3_priv),
};
600#endif
601