1
2
3
4
5
6
7
8
9
10
11
12
13#include <linux/kernel.h>
14#include <linux/errno.h>
15#include <linux/amba/pl330.h>
16#include <linux/scatterlist.h>
17#include <linux/export.h>
18
19#include <mach/dma.h>
20
21static unsigned samsung_dmadev_request(enum dma_ch dma_ch,
22 struct samsung_dma_info *info)
23{
24 struct dma_chan *chan;
25 dma_cap_mask_t mask;
26 struct dma_slave_config slave_config;
27 void *filter_param;
28
29 dma_cap_zero(mask);
30 dma_cap_set(info->cap, mask);
31
32
33
34
35
36 filter_param = (dma_ch == DMACH_DT_PROP) ? (void *)info->dt_dmach_prop :
37 (void *)dma_ch;
38 chan = dma_request_channel(mask, pl330_filter, filter_param);
39
40 if (info->direction == DMA_DEV_TO_MEM) {
41 memset(&slave_config, 0, sizeof(struct dma_slave_config));
42 slave_config.direction = info->direction;
43 slave_config.src_addr = info->fifo;
44 slave_config.src_addr_width = info->width;
45 slave_config.src_maxburst = 1;
46 dmaengine_slave_config(chan, &slave_config);
47 } else if (info->direction == DMA_MEM_TO_DEV) {
48 memset(&slave_config, 0, sizeof(struct dma_slave_config));
49 slave_config.direction = info->direction;
50 slave_config.dst_addr = info->fifo;
51 slave_config.dst_addr_width = info->width;
52 slave_config.dst_maxburst = 1;
53 dmaengine_slave_config(chan, &slave_config);
54 }
55
56 return (unsigned)chan;
57}
58
/*
 * samsung_dmadev_release - give back a channel from samsung_dmadev_request()
 * @ch: channel handle (a struct dma_chan * carried as unsigned)
 * @client: unused; kept for legacy s3c-dma interface compatibility
 *
 * Always returns 0.
 */
static int samsung_dmadev_release(unsigned ch,
				struct s3c2410_dma_client *client)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_release_channel(chan);
	return 0;
}
66
67static int samsung_dmadev_prepare(unsigned ch,
68 struct samsung_dma_prep_info *info)
69{
70 struct scatterlist sg;
71 struct dma_chan *chan = (struct dma_chan *)ch;
72 struct dma_async_tx_descriptor *desc;
73
74 switch (info->cap) {
75 case DMA_SLAVE:
76 sg_init_table(&sg, 1);
77 sg_dma_len(&sg) = info->len;
78 sg_set_page(&sg, pfn_to_page(PFN_DOWN(info->buf)),
79 info->len, offset_in_page(info->buf));
80 sg_dma_address(&sg) = info->buf;
81
82 desc = chan->device->device_prep_slave_sg(chan,
83 &sg, 1, info->direction, DMA_PREP_INTERRUPT);
84 break;
85 case DMA_CYCLIC:
86 desc = chan->device->device_prep_dma_cyclic(chan,
87 info->buf, info->len, info->period, info->direction);
88 break;
89 default:
90 dev_err(&chan->dev->device, "unsupported format\n");
91 return -EFAULT;
92 }
93
94 if (!desc) {
95 dev_err(&chan->dev->device, "cannot prepare cyclic dma\n");
96 return -EFAULT;
97 }
98
99 desc->callback = info->fp;
100 desc->callback_param = info->fp_param;
101
102 dmaengine_submit((struct dma_async_tx_descriptor *)desc);
103
104 return 0;
105}
106
/*
 * samsung_dmadev_trigger - start executing all submitted descriptors
 * @ch: channel handle (a struct dma_chan * carried as unsigned)
 *
 * Always returns 0.
 */
static inline int samsung_dmadev_trigger(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	dma_async_issue_pending(chan);
	return 0;
}
113
/*
 * samsung_dmadev_flush - abort all outstanding transfers on the channel
 * @ch: channel handle (a struct dma_chan * carried as unsigned)
 *
 * Returns the dmaengine terminate result (0 on success).
 */
static inline int samsung_dmadev_flush(unsigned ch)
{
	struct dma_chan *chan = (struct dma_chan *)ch;

	return dmaengine_terminate_all(chan);
}
118
/*
 * dmaengine-backed implementation of the samsung DMA ops interface.
 * .stop reuses the flush implementation (both terminate all transfers);
 * .started is intentionally unimplemented on this backend.
 */
struct samsung_dma_ops dmadev_ops = {
	.request	= samsung_dmadev_request,
	.release	= samsung_dmadev_release,
	.prepare	= samsung_dmadev_prepare,
	.trigger	= samsung_dmadev_trigger,
	.started	= NULL,
	.flush		= samsung_dmadev_flush,
	.stop		= samsung_dmadev_flush,
};
128
129void *samsung_dmadev_get_ops(void)
130{
131 return &dmadev_ops;
132}
133EXPORT_SYMBOL(samsung_dmadev_get_ops);
134