#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <asm/dma.h>
#include <hwregs/reg_map.h>
#include <hwregs/reg_rdwr.h>
#include <hwregs/marb_defs.h>
#include <hwregs/config_defs.h>
#include <hwregs/strmux_defs.h>
#include <linux/errno.h>
#include <mach/arbiter.h>

static char used_dma_channels[MAX_DMA_CHANNELS];
static const char *used_dma_channels_users[MAX_DMA_CHANNELS];

static DEFINE_SPINLOCK(dma_lock);

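/*
 * crisv32_request_dma - claim a DMA channel and route it to its owner.
 * @dmanr: channel number, 0..MAX_DMA_CHANNELS-1
 * @device_id: name stored for error reporting
 * @options: DMA_INT_MEM selects the internal memory arbiter region;
 *           DMA_VERBOSE_ON_ERROR / DMA_PANIC_ON_ERROR control error handling
 * @bandwidth: bandwidth to reserve with the bus arbiter
 * @owner: peripheral the channel is multiplexed to
 *
 * Reserves arbiter bandwidth, marks the channel as used, enables the
 * clock for the channel pair and programs the stream multiplexer so the
 * channel serves @owner.  Returns 0 on success, -ENOMEM if the bandwidth
 * allocation fails, -EBUSY if the channel is already taken and -EINVAL
 * for an out-of-range channel number.
 */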
int crisv32_request_dma(unsigned int dmanr, const char *device_id,
			unsigned options, unsigned int bandwidth,
			enum dma_owner owner)
{
	unsigned long flags;
	reg_config_rw_clk_ctrl clk_ctrl;
	reg_strmux_rw_cfg strmux_cfg;

	if (crisv32_arbiter_allocate_bandwidth(dmanr,
					       options & DMA_INT_MEM ?
					       INT_REGION : EXT_REGION,
					       bandwidth))
		return -ENOMEM;

	spin_lock_irqsave(&dma_lock, flags);

	if (used_dma_channels[dmanr]) {
		spin_unlock_irqrestore(&dma_lock, flags);
		if (options & DMA_VERBOSE_ON_ERROR) {
			printk(KERN_ERR "Failed to request DMA %i for %s, "
				"already allocated by %s\n",
				dmanr,
				device_id,
				used_dma_channels_users[dmanr]);
		}
		if (options & DMA_PANIC_ON_ERROR)
			panic("request_dma error!");
		return -EBUSY;
	}
	clk_ctrl = REG_RD(config, regi_config, rw_clk_ctrl);
	strmux_cfg = REG_RD(strmux, regi_strmux, rw_cfg);

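	/* Enable the clock for the pair of channels dmanr belongs to. */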
	switch (dmanr) {
	case 0:
	case 1:
		clk_ctrl.dma01_eth0 = 1;
		break;
	case 2:
	case 3:
		clk_ctrl.dma23 = 1;
		break;
	case 4:
	case 5:
		clk_ctrl.dma45 = 1;
		break;
	case 6:
	case 7:
		clk_ctrl.dma67 = 1;
		break;
	case 8:
	case 9:
		clk_ctrl.dma89_strcop = 1;
		break;
#if MAX_DMA_CHANNELS-1 != 9
#error Check dma.c
#endif
	default:
		spin_unlock_irqrestore(&dma_lock, flags);
		if (options & DMA_VERBOSE_ON_ERROR) {
			printk(KERN_ERR "Failed to request DMA %i for %s, "
				"only 0-%i valid\n",
				dmanr, device_id, MAX_DMA_CHANNELS - 1);
		}

		if (options & DMA_PANIC_ON_ERROR)
			panic("request_dma error!");
		return -EINVAL;
	}

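	/*
	 * Route the channel to the requesting peripheral through the
	 * stream multiplexer.  Each owner can only be served by a fixed
	 * pair (or, for ext0/ext1, a single choice) of channels.
	 */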
	switch (owner) {
	case dma_eth0:
		if (dmanr == 0)
			strmux_cfg.dma0 = regk_strmux_eth0;
		else if (dmanr == 1)
			strmux_cfg.dma1 = regk_strmux_eth0;
		else
			panic("Invalid DMA channel for eth0\n");
		break;
	case dma_eth1:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_eth1;
		else if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_eth1;
		else
			panic("Invalid DMA channel for eth1\n");
		break;
	case dma_iop0:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_iop0;
		else if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_iop0;
		else
			panic("Invalid DMA channel for iop0\n");
		break;
	case dma_iop1:
		if (dmanr == 4)
			strmux_cfg.dma4 = regk_strmux_iop1;
		else if (dmanr == 5)
			strmux_cfg.dma5 = regk_strmux_iop1;
		else
			panic("Invalid DMA channel for iop1\n");
		break;
	case dma_ser0:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_ser0;
		else if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_ser0;
		else
			panic("Invalid DMA channel for ser0\n");
		break;
	case dma_ser1:
		if (dmanr == 4)
			strmux_cfg.dma4 = regk_strmux_ser1;
		else if (dmanr == 5)
			strmux_cfg.dma5 = regk_strmux_ser1;
		else
			panic("Invalid DMA channel for ser1\n");
		break;
	case dma_ser2:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_ser2;
		else if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_ser2;
		else
			panic("Invalid DMA channel for ser2\n");
		break;
	case dma_ser3:
		if (dmanr == 8)
			strmux_cfg.dma8 = regk_strmux_ser3;
		else if (dmanr == 9)
			strmux_cfg.dma9 = regk_strmux_ser3;
		else
			panic("Invalid DMA channel for ser3\n");
		break;
	case dma_sser0:
		if (dmanr == 4)
			strmux_cfg.dma4 = regk_strmux_sser0;
		else if (dmanr == 5)
			strmux_cfg.dma5 = regk_strmux_sser0;
		else
			panic("Invalid DMA channel for sser0\n");
		break;
	case dma_sser1:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_sser1;
		else if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_sser1;
		else
			panic("Invalid DMA channel for sser1\n");
		break;
	case dma_ata:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_ata;
		else if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_ata;
		else
			panic("Invalid DMA channel for ata\n");
		break;
	case dma_strp:
		if (dmanr == 8)
			strmux_cfg.dma8 = regk_strmux_strcop;
		else if (dmanr == 9)
			strmux_cfg.dma9 = regk_strmux_strcop;
		else
			panic("Invalid DMA channel for strp\n");
		break;
	case dma_ext0:
		if (dmanr == 6)
			strmux_cfg.dma6 = regk_strmux_ext0;
		else
			panic("Invalid DMA channel for ext0\n");
		break;
	case dma_ext1:
		if (dmanr == 7)
			strmux_cfg.dma7 = regk_strmux_ext1;
		else
			panic("Invalid DMA channel for ext1\n");
		break;
	case dma_ext2:
		if (dmanr == 2)
			strmux_cfg.dma2 = regk_strmux_ext2;
		else if (dmanr == 8)
			strmux_cfg.dma8 = regk_strmux_ext2;
		else
			panic("Invalid DMA channel for ext2\n");
		break;
	case dma_ext3:
		if (dmanr == 3)
			strmux_cfg.dma3 = regk_strmux_ext3;
		else if (dmanr == 9)
			strmux_cfg.dma9 = regk_strmux_ext3;
		else
			panic("Invalid DMA channel for ext3\n");
		break;
	}

	used_dma_channels[dmanr] = 1;
	used_dma_channels_users[dmanr] = device_id;
	REG_WR(config, regi_config, rw_clk_ctrl, clk_ctrl);
	REG_WR(strmux, regi_strmux, rw_cfg, strmux_cfg);
	spin_unlock_irqrestore(&dma_lock, flags);
	return 0;
}

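/*
 * crisv32_free_dma - mark @dmanr as available again.
 *
 * Only the in-use flag is cleared; the clock and stream multiplexer
 * settings programmed by crisv32_request_dma() are left as they are.
 */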
void crisv32_free_dma(unsigned int dmanr)
{
	spin_lock(&dma_lock);
	used_dma_channels[dmanr] = 0;
	spin_unlock(&dma_lock);
}
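
/*
 * Example (illustrative sketch only, not compiled as part of this file):
 * a hypothetical driver claiming channel 6 for ser0 and releasing it on
 * teardown.  The channel number, device name and bandwidth value are
 * placeholders; a real driver uses the numbers required by its hardware
 * block.
 *
 *	int err;
 *
 *	err = crisv32_request_dma(6, "my-serial", DMA_VERBOSE_ON_ERROR,
 *				  0, dma_ser0);
 *	if (err)
 *		return err;
 *	...
 *	crisv32_free_dma(6);
 */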