#include <linux/init.h>
#include <linux/platform_device.h>
#include <linux/io.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/clk.h>
#include <linux/err.h>

#include <lantiq_soc.h>
#include <xway_dma.h>

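/* DMA engine register offsets, relative to the mapped register window */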
#define LTQ_DMA_ID		0x08
#define LTQ_DMA_CTRL		0x10
#define LTQ_DMA_CPOLL		0x14
#define LTQ_DMA_CS		0x18
#define LTQ_DMA_CCTRL		0x1C
#define LTQ_DMA_CDBA		0x20
#define LTQ_DMA_CDLEN		0x24
#define LTQ_DMA_CIS		0x28
#define LTQ_DMA_CIE		0x2C
#define LTQ_DMA_PS		0x40
#define LTQ_DMA_PCTRL		0x44
#define LTQ_DMA_IRNEN		0xf4

#define DMA_DESCPT		BIT(3)		/* descriptor complete irq */
#define DMA_TX			BIT(8)		/* TX channel direction */
#define DMA_CHAN_ON		BIT(0)		/* channel on/off bit */
#define DMA_PDEN		BIT(6)		/* enable packet drop */
#define DMA_CHAN_RST		BIT(1)		/* channel reset */
#define DMA_RESET		BIT(0)		/* global software reset */
#define DMA_IRQ_ACK		0x7e		/* channel irq ack bits */
#define DMA_POLL		BIT(31)		/* enable channel polling */
#define DMA_CLK_DIV4		BIT(6)		/* polling clock divider */
#define DMA_2W_BURST		BIT(1)		/* 2 word burst length */
#define DMA_MAX_CHANNEL		20		/* the SoC has 20 channels */
#define DMA_ETOP_ENDIANNESS	(0xf << 8)	/* endianness swap for ETOP channels */
#define DMA_WEIGHT		(BIT(17) | BIT(16))	/* default channel weight */

#define ltq_dma_r32(x)			ltq_r32(ltq_dma_membase + (x))
#define ltq_dma_w32(x, y)		ltq_w32(x, ltq_dma_membase + (y))
#define ltq_dma_w32_mask(x, y, z)	ltq_w32_mask(x, y, \
						ltq_dma_membase + (z))

static void __iomem *ltq_dma_membase;

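/*
 * Channel registers (CCTRL, CIS, CIE, ...) are accessed indirectly: the
 * channel number is written to LTQ_DMA_CS first, then the per-channel
 * register window is read or written. Interrupts are disabled around each
 * access pair so the channel selection cannot change in between.
 */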
void
ltq_dma_enable_irq(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	local_irq_save(flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(0, 1 << ch->nr, LTQ_DMA_IRNEN);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_enable_irq);

void
ltq_dma_disable_irq(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	local_irq_save(flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(1 << ch->nr, 0, LTQ_DMA_IRNEN);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_disable_irq);

void
ltq_dma_ack_irq(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	local_irq_save(flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32(DMA_IRQ_ACK, LTQ_DMA_CIS);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_ack_irq);

void
ltq_dma_open(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	local_irq_save(flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(0, DMA_CHAN_ON, LTQ_DMA_CCTRL);
	ltq_dma_enable_irq(ch);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_open);

void
ltq_dma_close(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	local_irq_save(flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32_mask(DMA_CHAN_ON, 0, LTQ_DMA_CCTRL);
	ltq_dma_disable_irq(ch);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_close);

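/*
 * Allocate the coherent descriptor ring for a channel (LTQ_DESC_NUM
 * descriptors of LTQ_DESC_SIZE bytes each), program its base address and
 * length into CDBA/CDLEN, and reset the channel so it starts from a clean
 * state. Shared by the TX and RX setup helpers below.
 */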
static void
ltq_dma_alloc(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	ch->desc = 0;
	ch->desc_base = dma_alloc_coherent(NULL,
				LTQ_DESC_NUM * LTQ_DESC_SIZE,
				&ch->phys, GFP_ATOMIC);
	memset(ch->desc_base, 0, LTQ_DESC_NUM * LTQ_DESC_SIZE);

	local_irq_save(flags);
	ltq_dma_w32(ch->nr, LTQ_DMA_CS);
	ltq_dma_w32(ch->phys, LTQ_DMA_CDBA);
	ltq_dma_w32(LTQ_DESC_NUM, LTQ_DMA_CDLEN);
	ltq_dma_w32_mask(DMA_CHAN_ON, 0, LTQ_DMA_CCTRL);
	wmb();
	ltq_dma_w32_mask(0, DMA_CHAN_RST, LTQ_DMA_CCTRL);
	/* wait for the hardware to clear the reset bit */
	while (ltq_dma_r32(LTQ_DMA_CCTRL) & DMA_CHAN_RST)
		;
	local_irq_restore(flags);
}

void
ltq_dma_alloc_tx(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	ltq_dma_alloc(ch);

	local_irq_save(flags);
	ltq_dma_w32(DMA_DESCPT, LTQ_DMA_CIE);
	ltq_dma_w32_mask(0, 1 << ch->nr, LTQ_DMA_IRNEN);
	ltq_dma_w32(DMA_WEIGHT | DMA_TX, LTQ_DMA_CCTRL);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_alloc_tx);

void
ltq_dma_alloc_rx(struct ltq_dma_channel *ch)
{
	unsigned long flags;

	ltq_dma_alloc(ch);

	local_irq_save(flags);
	ltq_dma_w32(DMA_DESCPT, LTQ_DMA_CIE);
	ltq_dma_w32_mask(0, 1 << ch->nr, LTQ_DMA_IRNEN);
	ltq_dma_w32(DMA_WEIGHT, LTQ_DMA_CCTRL);
	local_irq_restore(flags);
}
EXPORT_SYMBOL_GPL(ltq_dma_alloc_rx);

void
ltq_dma_free(struct ltq_dma_channel *ch)
{
	if (!ch->desc_base)
		return;
	ltq_dma_close(ch);
	dma_free_coherent(NULL, LTQ_DESC_NUM * LTQ_DESC_SIZE,
		ch->desc_base, ch->phys);
}
EXPORT_SYMBOL_GPL(ltq_dma_free);

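/*
 * Per-port setup: the ETOP (ethernet) port has the endianness of its data
 * swapped and packet drop enabled, while the DEU (data encryption unit)
 * port is programmed for a 2-word burst length.
 */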
void
ltq_dma_init_port(int p)
{
	ltq_dma_w32(p, LTQ_DMA_PS);
	switch (p) {
	case DMA_PORT_ETOP:
		/*
		 * Tell the DMA engine to swap the endianness of data frames
		 * and drop packets if the channel arbitration fails.
		 */
		ltq_dma_w32_mask(0, DMA_ETOP_ENDIANNESS | DMA_PDEN,
			LTQ_DMA_PCTRL);
		break;

	case DMA_PORT_DEU:
		ltq_dma_w32((DMA_2W_BURST << 4) | (DMA_2W_BURST << 2),
			LTQ_DMA_PCTRL);
		break;

	default:
		break;
	}
}
EXPORT_SYMBOL_GPL(ltq_dma_init_port);

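/*
 * Probe the DMA engine: map the register window, enable the DMA clock,
 * reset the controller, mask all interrupts and bring every channel into
 * a known disabled state before reporting the hardware revision.
 */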
static int
ltq_dma_init(struct platform_device *pdev)
{
	struct clk *clk;
	struct resource *res;
	unsigned int id;
	int i;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ltq_dma_membase = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(ltq_dma_membase))
		panic("Failed to remap dma resource");

	/* power up and reset the dma engine */
	clk = clk_get(&pdev->dev, NULL);
	if (IS_ERR(clk))
		panic("Failed to get dma clock");

	clk_enable(clk);
	ltq_dma_w32_mask(0, DMA_RESET, LTQ_DMA_CTRL);

	/* disable all interrupts */
	ltq_dma_w32(0, LTQ_DMA_IRNEN);

	/* reset/configure each channel */
	for (i = 0; i < DMA_MAX_CHANNEL; i++) {
		ltq_dma_w32(i, LTQ_DMA_CS);
		ltq_dma_w32(DMA_CHAN_RST, LTQ_DMA_CCTRL);
		ltq_dma_w32(DMA_POLL | DMA_CLK_DIV4, LTQ_DMA_CPOLL);
		ltq_dma_w32_mask(DMA_CHAN_ON, 0, LTQ_DMA_CCTRL);
	}

	id = ltq_dma_r32(LTQ_DMA_ID);
	dev_info(&pdev->dev,
		"Init done - hw rev: %X, ports: %d, channels: %d\n",
		id & 0x1f, (id >> 16) & 0xf, id >> 20);

	return 0;
}

static const struct of_device_id dma_match[] = {
	{ .compatible = "lantiq,dma-xway" },
	{},
};
MODULE_DEVICE_TABLE(of, dma_match);

static struct platform_driver dma_driver = {
	.probe = ltq_dma_init,
	.driver = {
		.name = "dma-xway",
		.of_match_table = dma_match,
	},
};

static int __init
dma_init(void)
{
	return platform_driver_register(&dma_driver);
}

postcore_initcall(dma_init);