#ifndef ASMARM_DMA_MAPPING_H
#define ASMARM_DMA_MAPPING_H

#ifdef __KERNEL__

#include <linux/mm_types.h>
#include <linux/scatterlist.h>
#include <linux/dma-attrs.h>
#include <linux/dma-debug.h>

#include <asm-generic/dma-coherent.h>
#include <asm/memory.h>

#include <xen/xen.h>
#include <asm/xen/hypervisor.h>

#define DMA_ERROR_CODE	(~0)
extern struct dma_map_ops arm_dma_ops;
extern struct dma_map_ops arm_coherent_dma_ops;

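/*
 * Select the set of DMA operations for a device: per-device ops installed
 * in dev->archdata take precedence, otherwise fall back to the default
 * arm_dma_ops.  When running as the Xen initial domain, all DMA is routed
 * through xen_dma_ops instead.
 */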
static inline struct dma_map_ops *__generic_dma_ops(struct device *dev)
{
	if (dev && dev->archdata.dma_ops)
		return dev->archdata.dma_ops;
	return &arm_dma_ops;
}

static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	if (xen_initial_domain())
		return xen_dma_ops;
	else
		return __generic_dma_ops(dev);
}

static inline void set_dma_ops(struct device *dev, struct dma_map_ops *ops)
{
	BUG_ON(!dev);
	dev->archdata.dma_ops = ops;
}

#include <asm-generic/dma-mapping-common.h>

static inline int dma_set_mask(struct device *dev, u64 mask)
{
	return get_dma_ops(dev)->set_dma_mask(dev, mask);
}

#ifdef __arch_page_to_dma
#error Please update to __arch_pfn_to_dma
#endif

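/*
 * dma_to_pfn/pfn_to_dma/dma_to_virt/virt_to_dma are architecture private
 * functions used internally by the DMA-mapping API to provide DMA
 * addresses. They must not be used by drivers.
 */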
#ifndef __arch_pfn_to_dma
static inline dma_addr_t pfn_to_dma(struct device *dev, unsigned long pfn)
{
	return (dma_addr_t)__pfn_to_bus(pfn);
}

static inline unsigned long dma_to_pfn(struct device *dev, dma_addr_t addr)
{
	return __bus_to_pfn(addr);
}

static inline void *dma_to_virt(struct device *dev, dma_addr_t addr)
{
	return (void *)__bus_to_virt((unsigned long)addr);
}

static inline dma_addr_t virt_to_dma(struct device *dev, void *addr)
{
	return (dma_addr_t)__virt_to_bus((unsigned long)(addr));
}

#else
static inline dma_addr_t pfn_to_dma(struct device *dev, unsigned long pfn)
{
	return __arch_pfn_to_dma(dev, pfn);
}

static inline unsigned long dma_to_pfn(struct device *dev, dma_addr_t addr)
{
	return __arch_dma_to_pfn(dev, addr);
}

static inline void *dma_to_virt(struct device *dev, dma_addr_t addr)
{
	return __arch_dma_to_virt(dev, addr);
}

static inline dma_addr_t virt_to_dma(struct device *dev, void *addr)
{
	return __arch_virt_to_dma(dev, addr);
}
#endif

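/* The ARM override for dma_max_pfn() */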
static inline unsigned long dma_max_pfn(struct device *dev)
{
	return PHYS_PFN_OFFSET + dma_to_pfn(dev, *dev->dma_mask);
}
#define dma_max_pfn(dev) dma_max_pfn(dev)

static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
{
	unsigned int offset = paddr & ~PAGE_MASK;
	return pfn_to_dma(dev, __phys_to_pfn(paddr)) + offset;
}

static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dev_addr)
{
	unsigned int offset = dev_addr & ~PAGE_MASK;
	return __pfn_to_phys(dma_to_pfn(dev, dev_addr)) + offset;
}

static inline bool dma_capable(struct device *dev, dma_addr_t addr, size_t size)
{
	u64 limit, mask;

	if (!dev->dma_mask)
		return 0;

	mask = *dev->dma_mask;

	limit = (mask + 1) & ~mask;
	if (limit && size > limit)
		return 0;

	if ((addr | (addr + size - 1)) & ~mask)
		return 0;

	return 1;
}

static inline void dma_mark_clean(void *addr, size_t size) { }

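/*
 * DMA errors are defined by all-bits-set in the DMA address.
 */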
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	debug_dma_mapping_error(dev, dma_addr);
	return dma_addr == DMA_ERROR_CODE;
}
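
/*
 * Illustrative only (dev, buf and len are hypothetical driver variables):
 * every mapping returned by the DMA API should be checked before use, e.g.
 *
 *	dma_addr_t h = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, h))
 *		return -ENOMEM;
 */
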
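/*
 * Dummy noncoherent implementation.  We don't provide a dma_cache_sync
 * function so drivers using this API are highlighted with build warnings.
 */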
static inline void *dma_alloc_noncoherent(struct device *dev, size_t size,
		dma_addr_t *handle, gfp_t gfp)
{
	return NULL;
}

static inline void dma_free_noncoherent(struct device *dev, size_t size,
		void *cpu_addr, dma_addr_t handle)
{
}

extern int dma_supported(struct device *dev, u64 mask);

extern int arm_dma_set_mask(struct device *dev, u64 dma_mask);

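/**
 * arm_dma_alloc - allocate consistent memory for DMA
 * @dev: valid struct device pointer, or NULL for ISA and EISA-like devices
 * @size: required memory size
 * @handle: bus-specific DMA address
 * @gfp: allocation flags
 * @attrs: optional attributes that specify mapping properties
 *
 * Allocate some memory for a device for performing DMA.  This function
 * allocates pages, and will return the CPU-viewed address, and sets @handle
 * to be the device-viewed address.
 */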
extern void *arm_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle,
			   gfp_t gfp, struct dma_attrs *attrs);

#define dma_alloc_coherent(d, s, h, f) dma_alloc_attrs(d, s, h, f, NULL)

static inline void *dma_alloc_attrs(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t flag,
				    struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *cpu_addr;
	BUG_ON(!ops);

	cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs);
	debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr);
	return cpu_addr;
}
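
/*
 * Typical usage, illustrative only ("mydev" and the size are hypothetical):
 *
 *	dma_addr_t dma_handle;
 *	void *cpu_addr = dma_alloc_coherent(&mydev->dev, PAGE_SIZE,
 *					    &dma_handle, GFP_KERNEL);
 *	if (!cpu_addr)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(&mydev->dev, PAGE_SIZE, cpu_addr, dma_handle);
 */
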
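/**
 * arm_dma_free - free memory allocated by arm_dma_alloc
 * @dev: valid struct device pointer, or NULL for ISA and EISA-like devices
 * @size: size of memory originally requested in dma_alloc_coherent
 * @cpu_addr: CPU-view address returned from dma_alloc_coherent
 * @handle: device-view address returned from dma_alloc_coherent
 * @attrs: optional attributes that specify mapping properties
 *
 * Free (and unmap) a DMA buffer previously allocated by dma_alloc_coherent().
 * References to the memory and mappings associated with cpu_addr/handle
 * during and after this call executing are illegal.
 */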
extern void arm_dma_free(struct device *dev, size_t size, void *cpu_addr,
			 dma_addr_t handle, struct dma_attrs *attrs);

#define dma_free_coherent(d, s, c, h) dma_free_attrs(d, s, c, h, NULL)

static inline void dma_free_attrs(struct device *dev, size_t size,
				  void *cpu_addr, dma_addr_t dma_handle,
				  struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	BUG_ON(!ops);

	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	ops->free(dev, size, cpu_addr, dma_handle, attrs);
}

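/**
 * arm_dma_mmap - map a coherent DMA allocation into user space
 * @dev: valid struct device pointer, or NULL for ISA and EISA-like devices
 * @vma: vm_area_struct describing requested user mapping
 * @cpu_addr: kernel CPU-view address returned from dma_alloc_coherent
 * @dma_addr: device-view address returned from dma_alloc_coherent
 * @size: size of memory originally requested in dma_alloc_coherent
 * @attrs: optional attributes that specify mapping properties
 *
 * Map a coherent DMA buffer previously allocated by dma_alloc_coherent
 * into user space.  The coherent DMA buffer must not be freed by the
 * driver until the user space mapping has been released.
 */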
extern int arm_dma_mmap(struct device *dev, struct vm_area_struct *vma,
			void *cpu_addr, dma_addr_t dma_addr, size_t size,
			struct dma_attrs *attrs);

static inline void *dma_alloc_writecombine(struct device *dev, size_t size,
					   dma_addr_t *dma_handle, gfp_t flag)
{
	DEFINE_DMA_ATTRS(attrs);
	dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
	return dma_alloc_attrs(dev, size, dma_handle, flag, &attrs);
}

static inline void dma_free_writecombine(struct device *dev, size_t size,
					 void *cpu_addr, dma_addr_t dma_handle)
{
	DEFINE_DMA_ATTRS(attrs);
	dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
	dma_free_attrs(dev, size, cpu_addr, dma_handle, &attrs);
}
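
/*
 * This can be called during early boot to increase the size of the atomic
 * coherent DMA pool above the default value of 256KiB.  It must be called
 * before postcore_initcall.
 */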
extern void __init init_dma_coherent_pool_size(unsigned long size);
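
/*
 * For SA-1111, IXP425, and ADI systems the dma-mapping functions are "magic"
 * and utilize bounce buffers as needed to work around limited DMA windows.
 *
 * On the SA-1111, a bug limits DMA to only certain regions of RAM.
 * On the IXP425, the PCI inbound window is 64MB (256MB total RAM).
 * On some ADI engineering systems, the PCI inbound window is 32MB
 * (12MB total RAM).
 *
 * The following are helper functions used by the dmabounce subsystem.
 */

/**
 * dmabounce_register_dev
 * @dev: valid struct device pointer
 * @small_buf_size: size of buffers to use with small buffer pool
 * @large_buf_size: size of buffers to use with large buffer pool (can be 0)
 * @needs_bounce_fn: called to determine whether a buffer needs bouncing
 *
 * This function should be called by low-level platform code to register
 * a device as requiring DMA buffer bouncing.  The function will allocate
 * appropriate DMA pools for the device.
 */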
extern int dmabounce_register_dev(struct device *, unsigned long,
		unsigned long, int (*)(struct device *, dma_addr_t, size_t));
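
/**
 * dmabounce_unregister_dev
 * @dev: valid struct device pointer
 *
 * This function should be called by low-level platform code when a device
 * that was previously registered with dmabounce_register_dev is removed
 * from the system.
 */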
extern void dmabounce_unregister_dev(struct device *);
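
/*
 * The scatter list versions of the above methods.
 */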
extern int arm_dma_map_sg(struct device *, struct scatterlist *, int,
		enum dma_data_direction, struct dma_attrs *attrs);
extern void arm_dma_unmap_sg(struct device *, struct scatterlist *, int,
		enum dma_data_direction, struct dma_attrs *attrs);
extern void arm_dma_sync_sg_for_cpu(struct device *, struct scatterlist *, int,
		enum dma_data_direction);
extern void arm_dma_sync_sg_for_device(struct device *, struct scatterlist *, int,
		enum dma_data_direction);
extern int arm_dma_get_sgtable(struct device *dev, struct sg_table *sgt,
		void *cpu_addr, dma_addr_t dma_addr, size_t size,
		struct dma_attrs *attrs);

#endif /* __KERNEL__ */
#endif