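/*
 * Defines for using and allocating ISA-style DMA channels on MIPS
 * (the PC-style pair of cascaded 8237 DMA controllers).
 */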
#ifndef _ASM_DMA_H
#define _ASM_DMA_H

#include <asm/io.h>
#include <linux/spinlock.h>
#include <linux/delay.h>

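/*
 * On boards with a really slow DMA controller we use the pausing outb_p()
 * variant, so back-to-back register writes give the chip time to recover
 * between accesses.
 */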
#ifdef HAVE_REALLY_SLOW_DMA_CONTROLLER
#define dma_outb	outb_p
#else
#define dma_outb	outb
#endif

#define dma_inb		inb

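/*
 * The ISA DMA hardware is a pair of cascaded 8237 controllers:
 *
 *  - DMA1 (channels 0..3) does 8-bit transfers; addresses and counts
 *    are programmed in bytes.
 *  - DMA2 (channels 4..7) does 16-bit transfers; addresses and counts
 *    are programmed in 16-bit words, hence the extra shifts below.
 *  - Channel 4 cascades DMA1 into DMA2 and is not usable by drivers.
 *
 * Each channel has an external 8-bit page register supplying the upper
 * address bits, so a transfer can reach at most 16 MB and may not cross
 * a 64 KB (128 KB for the 16-bit channels) boundary.
 */
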
#ifndef CONFIG_GENERIC_ISA_DMA_SUPPORT_BROKEN
#define MAX_DMA_CHANNELS	8
#endif
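
/*
 * The maximum address that we can perform an ISA-style DMA transfer to
 * on this platform.  This describes only the PC-style part of the DMA
 * logic; on SGI IP22/IP28 the limit collapses to PAGE_OFFSET, elsewhere
 * the traditional 16 MB window applies.
 */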
#if defined(CONFIG_SGI_IP22) || defined(CONFIG_SGI_IP28)
/* ISA bus-master DMA won't work here; ISA slave DMA supports 32-bit addresses */
#define MAX_DMA_ADDRESS		PAGE_OFFSET
#else
#define MAX_DMA_ADDRESS		(PAGE_OFFSET + 0x01000000)
#endif
#define MAX_DMA_PFN		PFN_DOWN(virt_to_phys((void *)MAX_DMA_ADDRESS))

#ifndef MAX_DMA32_PFN
#define MAX_DMA32_PFN	(1UL << (32 - PAGE_SHIFT))
#endif

/* 8237 DMA controller I/O bases */
#define IO_DMA1_BASE	0x00	/* 8-bit slave DMA, channels 0..3 */
#define IO_DMA2_BASE	0xC0	/* 16-bit master DMA, channels 4(=slave input)..7 */

/* DMA controller registers */
#define DMA1_CMD_REG		0x08	/* command register (w) */
#define DMA1_STAT_REG		0x08	/* status register (r) */
#define DMA1_REQ_REG		0x09	/* request register (w) */
#define DMA1_MASK_REG		0x0A	/* single-channel mask (w) */
#define DMA1_MODE_REG		0x0B	/* mode register (w) */
#define DMA1_CLEAR_FF_REG	0x0C	/* clear pointer flip-flop (w) */
#define DMA1_TEMP_REG		0x0D	/* Temporary Register (r) */
#define DMA1_RESET_REG		0x0D	/* Master Clear (w) */
#define DMA1_CLR_MASK_REG	0x0E	/* Clear Mask */
#define DMA1_MASK_ALL_REG	0x0F	/* all-channels mask (w) */

#define DMA2_CMD_REG		0xD0	/* command register (w) */
#define DMA2_STAT_REG		0xD0	/* status register (r) */
#define DMA2_REQ_REG		0xD2	/* request register (w) */
#define DMA2_MASK_REG		0xD4	/* single-channel mask (w) */
#define DMA2_MODE_REG		0xD6	/* mode register (w) */
#define DMA2_CLEAR_FF_REG	0xD8	/* clear pointer flip-flop (w) */
#define DMA2_TEMP_REG		0xDA	/* Temporary Register (r) */
#define DMA2_RESET_REG		0xDA	/* Master Clear (w) */
#define DMA2_CLR_MASK_REG	0xDC	/* Clear Mask */
#define DMA2_MASK_ALL_REG	0xDE	/* all-channels mask (w) */

#define DMA_ADDR_0		0x00	/* DMA address registers */
#define DMA_ADDR_1		0x02
#define DMA_ADDR_2		0x04
#define DMA_ADDR_3		0x06
#define DMA_ADDR_4		0xC0
#define DMA_ADDR_5		0xC4
#define DMA_ADDR_6		0xC8
#define DMA_ADDR_7		0xCC

#define DMA_CNT_0		0x01	/* DMA count registers */
#define DMA_CNT_1		0x03
#define DMA_CNT_2		0x05
#define DMA_CNT_3		0x07
#define DMA_CNT_4		0xC2
#define DMA_CNT_5		0xC6
#define DMA_CNT_6		0xCA
#define DMA_CNT_7		0xCE

#define DMA_PAGE_0		0x87	/* DMA page registers */
#define DMA_PAGE_1		0x83
#define DMA_PAGE_2		0x81
#define DMA_PAGE_3		0x82
#define DMA_PAGE_5		0x8B
#define DMA_PAGE_6		0x89
#define DMA_PAGE_7		0x8A

#define DMA_MODE_READ	0x44	/* I/O to memory, no autoinit, increment, single mode */
#define DMA_MODE_WRITE	0x48	/* memory to I/O, no autoinit, increment, single mode */
#define DMA_MODE_CASCADE 0xC0	/* pass thru DREQ->HRQ, DACK<-HLDA only */

#define DMA_AUTOINIT	0x10

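/*
 * Programming the 16-bit address and count registers takes two successive
 * byte accesses, so all register accesses below must be serialised.
 * claim_dma_lock()/release_dma_lock() take the global ISA DMA spinlock
 * and disable local interrupts while it is held.
 */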
extern spinlock_t dma_spin_lock;

static __inline__ unsigned long claim_dma_lock(void)
{
	unsigned long flags;
	spin_lock_irqsave(&dma_spin_lock, flags);
	return flags;
}

static __inline__ void release_dma_lock(unsigned long flags)
{
	spin_unlock_irqrestore(&dma_spin_lock, flags);
}

/* Enable/disable a specific DMA channel. */
static __inline__ void enable_dma(unsigned int dmanr)
{
	if (dmanr <= 3)
		dma_outb(dmanr, DMA1_MASK_REG);
	else
		dma_outb(dmanr & 3, DMA2_MASK_REG);
}

static __inline__ void disable_dma(unsigned int dmanr)
{
	if (dmanr <= 3)
		dma_outb(dmanr | 4, DMA1_MASK_REG);
	else
		dma_outb((dmanr & 3) | 4, DMA2_MASK_REG);
}

/*
 * Clear the 'DMA Pointer Flip Flop'.  Any write to the clear-FF port
 * resets the controller's internal byte pointer, so the next access to
 * a 16-bit address or count register hits the low byte.  Use this once
 * to get the flip-flop into a known state before programming addresses
 * or counts.
 */
static __inline__ void clear_dma_ff(unsigned int dmanr)
{
	if (dmanr <= 3)
		dma_outb(0, DMA1_CLEAR_FF_REG);
	else
		dma_outb(0, DMA2_CLEAR_FF_REG);
}

/* Set the transfer mode (see the DMA_MODE_* constants) for a specific DMA channel. */
static __inline__ void set_dma_mode(unsigned int dmanr, char mode)
{
	if (dmanr <= 3)
		dma_outb(mode | dmanr, DMA1_MODE_REG);
	else
		dma_outb(mode | (dmanr & 3), DMA2_MODE_REG);
}

/*
 * Set only the page register bits of the transfer address.  This is
 * used for successive transfers when we know the contents of the lower
 * 16 bits of the DMA current address register, but a 64k boundary may
 * have been crossed.
 */
static __inline__ void set_dma_page(unsigned int dmanr, char pagenr)
{
	switch (dmanr) {
	case 0:
		dma_outb(pagenr, DMA_PAGE_0);
		break;
	case 1:
		dma_outb(pagenr, DMA_PAGE_1);
		break;
	case 2:
		dma_outb(pagenr, DMA_PAGE_2);
		break;
	case 3:
		dma_outb(pagenr, DMA_PAGE_3);
		break;
	case 5:	/* the 16-bit channels ignore the low page bit */
		dma_outb(pagenr & 0xfe, DMA_PAGE_5);
		break;
	case 6:
		dma_outb(pagenr & 0xfe, DMA_PAGE_6);
		break;
	case 7:
		dma_outb(pagenr & 0xfe, DMA_PAGE_7);
		break;
	}
}

/*
 * Set the transfer address and page bits for a specific DMA channel.
 * Assumes the DMA flip-flop has been cleared.  The 16-bit channels
 * (5..7) are programmed in words, hence the extra shift by one.
 */
static __inline__ void set_dma_addr(unsigned int dmanr, unsigned int a)
{
	set_dma_page(dmanr, a >> 16);
	if (dmanr <= 3) {
		dma_outb(a & 0xff, ((dmanr & 3) << 1) + IO_DMA1_BASE);
		dma_outb((a >> 8) & 0xff, ((dmanr & 3) << 1) + IO_DMA1_BASE);
	} else {
		dma_outb((a >> 1) & 0xff, ((dmanr & 3) << 2) + IO_DMA2_BASE);
		dma_outb((a >> 9) & 0xff, ((dmanr & 3) << 2) + IO_DMA2_BASE);
	}
}

/*
 * Set the transfer size (max 64k for channels 0..3, 128k for channels
 * 5..7) for a specific DMA channel.  "count" is in bytes and must be
 * even for the 16-bit channels.  The 8237 performs one more transfer
 * than the programmed count, which is why the value is decremented
 * here.  Assumes the DMA flip-flop has been cleared.
 */
static __inline__ void set_dma_count(unsigned int dmanr, unsigned int count)
{
	count--;
	if (dmanr <= 3) {
		dma_outb(count & 0xff, ((dmanr & 3) << 1) + 1 + IO_DMA1_BASE);
		dma_outb((count >> 8) & 0xff, ((dmanr & 3) << 1) + 1 + IO_DMA1_BASE);
	} else {
		dma_outb((count >> 1) & 0xff, ((dmanr & 3) << 2) + 2 + IO_DMA2_BASE);
		dma_outb((count >> 9) & 0xff, ((dmanr & 3) << 2) + 2 + IO_DMA2_BASE);
	}
}

/*
 * Get the DMA residue count.  After a completed transfer this should
 * return zero; reading it while a transfer is still in progress gives
 * unpredictable results.  Otherwise it returns the number of _bytes_
 * left to transfer.  Assumes the DMA flip-flop has been cleared.
 */
static __inline__ int get_dma_residue(unsigned int dmanr)
{
	unsigned int io_port = (dmanr <= 3) ?
		((dmanr & 3) << 1) + 1 + IO_DMA1_BASE :
		((dmanr & 3) << 2) + 2 + IO_DMA2_BASE;

	/* using short to get 16-bit wrap around */
	unsigned short count;

	count = 1 + dma_inb(io_port);
	count += dma_inb(io_port) << 8;

	return (dmanr <= 3) ? count : (count << 1);
}

/* Channel allocation and release, implemented in kernel/dma.c: */
extern int request_dma(unsigned int dmanr, const char *device_id);	/* reserve a DMA channel */
extern void free_dma(unsigned int dmanr);				/* release it again */
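
/*
 * Illustrative only: a rough sketch of how a driver programs a one-shot
 * transfer on an 8-bit channel.  The channel number, buffer address and
 * length below are hypothetical; the buffer must lie in ISA-DMA-capable
 * memory and must not cross a 64 KB boundary.
 *
 *	unsigned long flags;
 *
 *	if (request_dma(chan, "mydev"))
 *		return -EBUSY;
 *	flags = claim_dma_lock();
 *	disable_dma(chan);
 *	clear_dma_ff(chan);
 *	set_dma_mode(chan, DMA_MODE_READ);	(device -> memory)
 *	set_dma_addr(chan, buf_phys);		(bus/physical address of the buffer)
 *	set_dma_count(chan, len);		(length in bytes)
 *	enable_dma(chan);
 *	release_dma_lock(flags);
 *	... wait for the device's completion interrupt, then free_dma(chan) ...
 */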

#ifdef CONFIG_PCI
extern int isa_dma_bridge_buggy;
#else
#define isa_dma_bridge_buggy	(0)
#endif

#endif /* _ASM_DMA_H */