#ifndef ASMARM_DMA_MAPPING_H
#define ASMARM_DMA_MAPPING_H

#ifdef __KERNEL__

#include <linux/mm_types.h>
#include <linux/scatterlist.h>
#include <linux/dma-attrs.h>
#include <linux/dma-debug.h>

#include <asm-generic/dma-coherent.h>
#include <asm/memory.h>

#define DMA_ERROR_CODE	(~0)
extern struct dma_map_ops arm_dma_ops;
extern struct dma_map_ops arm_coherent_dma_ops;

static inline struct dma_map_ops *get_dma_ops(struct device *dev)
{
	if (dev && dev->archdata.dma_ops)
		return dev->archdata.dma_ops;
	return &arm_dma_ops;
}

static inline void set_dma_ops(struct device *dev, struct dma_map_ops *ops)
{
	BUG_ON(!dev);
	dev->archdata.dma_ops = ops;
}

#include <asm-generic/dma-mapping-common.h>

static inline int dma_set_mask(struct device *dev, u64 mask)
{
	return get_dma_ops(dev)->set_dma_mask(dev, mask);
}

#ifdef __arch_page_to_dma
#error Please update to __arch_pfn_to_dma
#endif
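
/*
 * pfn_to_dma/dma_to_pfn/dma_to_virt/virt_to_dma are architecture-private
 * helpers used internally by the DMA-mapping API to convert between CPU
 * and bus (DMA) addresses.  They must not be used by drivers.
 */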
#ifndef __arch_pfn_to_dma
static inline dma_addr_t pfn_to_dma(struct device *dev, unsigned long pfn)
{
	return (dma_addr_t)__pfn_to_bus(pfn);
}

static inline unsigned long dma_to_pfn(struct device *dev, dma_addr_t addr)
{
	return __bus_to_pfn(addr);
}

static inline void *dma_to_virt(struct device *dev, dma_addr_t addr)
{
	return (void *)__bus_to_virt((unsigned long)addr);
}

static inline dma_addr_t virt_to_dma(struct device *dev, void *addr)
{
	return (dma_addr_t)__virt_to_bus((unsigned long)(addr));
}
#else
static inline dma_addr_t pfn_to_dma(struct device *dev, unsigned long pfn)
{
	return __arch_pfn_to_dma(dev, pfn);
}

static inline unsigned long dma_to_pfn(struct device *dev, dma_addr_t addr)
{
	return __arch_dma_to_pfn(dev, addr);
}

static inline void *dma_to_virt(struct device *dev, dma_addr_t addr)
{
	return __arch_dma_to_virt(dev, addr);
}

static inline dma_addr_t virt_to_dma(struct device *dev, void *addr)
{
	return __arch_virt_to_dma(dev, addr);
}
#endif
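
/*
 * DMA errors are signalled by an all-bits-set DMA address (DMA_ERROR_CODE).
 */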
static inline int dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
	debug_dma_mapping_error(dev, dma_addr);
	return dma_addr == DMA_ERROR_CODE;
}
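
/*
 * Dummy noncoherent implementation: allocation always fails and the free
 * routine is a no-op, so drivers relying on this API are caught early.
 */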
static inline void *dma_alloc_noncoherent(struct device *dev, size_t size,
					  dma_addr_t *handle, gfp_t gfp)
{
	return NULL;
}

static inline void dma_free_noncoherent(struct device *dev, size_t size,
					void *cpu_addr, dma_addr_t handle)
{
}

extern int dma_supported(struct device *dev, u64 mask);

extern int arm_dma_set_mask(struct device *dev, u64 dma_mask);
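
/**
 * arm_dma_alloc - allocate consistent memory for DMA
 * @dev: valid struct device pointer, or NULL for ISA and EISA-like devices
 * @size: required memory size
 * @handle: bus-specific DMA address
 * @gfp: allocation flags
 * @attrs: optional attributes that modify the mapping properties
 *
 * Allocate some memory for a device for performing DMA.  This function
 * allocates pages and returns the CPU-mapped address of those pages; the
 * device-visible address is stored in @handle.
 */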
extern void *arm_dma_alloc(struct device *dev, size_t size, dma_addr_t *handle,
			   gfp_t gfp, struct dma_attrs *attrs);

#define dma_alloc_coherent(d, s, h, f) dma_alloc_attrs(d, s, h, f, NULL)

static inline void *dma_alloc_attrs(struct device *dev, size_t size,
				    dma_addr_t *dma_handle, gfp_t flag,
				    struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	void *cpu_addr;
	BUG_ON(!ops);

	cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs);
	debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr);
	return cpu_addr;
}
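
/**
 * arm_dma_free - free memory allocated by arm_dma_alloc
 * @dev: valid struct device pointer, or NULL for ISA and EISA-like devices
 * @size: size of memory originally requested in dma_alloc_coherent
 * @cpu_addr: CPU-view address returned from dma_alloc_coherent
 * @handle: device-view address returned from dma_alloc_coherent
 * @attrs: optional attributes that modify the mapping properties
 *
 * Free (and unmap) a DMA buffer previously allocated by arm_dma_alloc().
 * References to the memory and mappings associated with cpu_addr/handle
 * are no longer valid once this call has been made.
 */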
extern void arm_dma_free(struct device *dev, size_t size, void *cpu_addr,
			 dma_addr_t handle, struct dma_attrs *attrs);

#define dma_free_coherent(d, s, c, h) dma_free_attrs(d, s, c, h, NULL)

static inline void dma_free_attrs(struct device *dev, size_t size,
				  void *cpu_addr, dma_addr_t dma_handle,
				  struct dma_attrs *attrs)
{
	struct dma_map_ops *ops = get_dma_ops(dev);
	BUG_ON(!ops);

	debug_dma_free_coherent(dev, size, cpu_addr, dma_handle);
	ops->free(dev, size, cpu_addr, dma_handle, attrs);
}
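
/**
 * arm_dma_mmap - map a coherent DMA allocation into user space
 * @dev: valid struct device pointer, or NULL for ISA and EISA-like devices
 * @vma: vm_area_struct describing the requested user mapping
 * @cpu_addr: kernel CPU-view address returned from dma_alloc_coherent
 * @dma_addr: device-view address returned from dma_alloc_coherent
 * @size: size of memory originally requested in dma_alloc_coherent
 * @attrs: optional attributes that modify the mapping properties
 *
 * Map a coherent DMA buffer previously allocated by dma_alloc_coherent()
 * into user space.  The buffer must not be freed by the driver until the
 * user space mapping has been released.
 */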
extern int arm_dma_mmap(struct device *dev, struct vm_area_struct *vma,
			void *cpu_addr, dma_addr_t dma_addr, size_t size,
			struct dma_attrs *attrs);

static inline void *dma_alloc_writecombine(struct device *dev, size_t size,
					   dma_addr_t *dma_handle, gfp_t flag)
{
	DEFINE_DMA_ATTRS(attrs);
	dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
	return dma_alloc_attrs(dev, size, dma_handle, flag, &attrs);
}

static inline void dma_free_writecombine(struct device *dev, size_t size,
					 void *cpu_addr, dma_addr_t dma_handle)
{
	DEFINE_DMA_ATTRS(attrs);
	dma_set_attr(DMA_ATTR_WRITE_COMBINE, &attrs);
	return dma_free_attrs(dev, size, cpu_addr, dma_handle, &attrs);
}
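
/*
 * This can be called during early boot to change the size of the atomic
 * coherent DMA pool from its default value.  It must be called before the
 * pool is allocated.
 */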
extern void __init init_dma_coherent_pool_size(unsigned long size);
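
/*
 * The dmabounce subsystem provides bounce buffers for devices whose DMA
 * windows cannot reach all of memory.
 *
 * dmabounce_register_dev() is called by low-level platform code to register
 * a device as requiring DMA buffer bouncing.  The two size arguments give
 * the buffer sizes for the small and large bounce-buffer pools, and the
 * function pointer is called to decide whether a given DMA address/size
 * needs to be bounced.  Appropriate DMA pools are allocated for the device.
 */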
extern int dmabounce_register_dev(struct device *, unsigned long,
		unsigned long, int (*)(struct device *, dma_addr_t, size_t));
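
/*
 * dmabounce_unregister_dev() is called by low-level platform code when a
 * device previously registered with dmabounce_register_dev() is removed
 * from the system.
 */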
extern void dmabounce_unregister_dev(struct device *);
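
/*
 * The scatter-gather variants of the DMA mapping operations.
 */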
extern int arm_dma_map_sg(struct device *, struct scatterlist *, int,
		enum dma_data_direction, struct dma_attrs *attrs);
extern void arm_dma_unmap_sg(struct device *, struct scatterlist *, int,
		enum dma_data_direction, struct dma_attrs *attrs);
extern void arm_dma_sync_sg_for_cpu(struct device *, struct scatterlist *, int,
		enum dma_data_direction);
extern void arm_dma_sync_sg_for_device(struct device *, struct scatterlist *, int,
		enum dma_data_direction);
extern int arm_dma_get_sgtable(struct device *dev, struct sg_table *sgt,
		void *cpu_addr, dma_addr_t dma_addr, size_t size,
		struct dma_attrs *attrs);

#endif /* __KERNEL__ */
#endif