1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24#ifndef __SOUND_MEMALLOC_H
25#define __SOUND_MEMALLOC_H
26
27struct device;
28
29
30
31
/*
 * Buffer-device descriptor: pairs an allocation type with the device
 * the buffer was (or will be) allocated for.
 */
struct snd_dma_device {
	int type;		/* one of the SNDRV_DMA_TYPE_* constants below */
	struct device *dev;	/* owning device; may be NULL for
				 * device-independent types — NOTE(review):
				 * confirm against the allocator */
};
36
/*
 * Helpers deriving the "struct device *" argument for the allocators from
 * bus-specific handles.  Guarded so an architecture may pre-define its own.
 */
#ifndef snd_dma_pci_data
/* PCI device -> its embedded generic device */
#define snd_dma_pci_data(pci) (&(pci)->dev)
/* ISA has no struct device; the allocators take NULL here */
#define snd_dma_isa_data() NULL
/* Smuggle a scalar (presumably GFP flags — TODO confirm) through the
 * device pointer; __force silences sparse about the dubious cast */
#define snd_dma_continuous_data(x) ((struct device *)(__force unsigned long)(x))
#endif
42
43
44
45
46
/*
 * Buffer allocation types passed in snd_dma_device.type.
 * A type whose backend is compiled out falls back to SNDRV_DMA_TYPE_DEV.
 */
#define SNDRV_DMA_TYPE_UNKNOWN 0	/* not defined */
#define SNDRV_DMA_TYPE_CONTINUOUS 1	/* continuous, device-independent pages */
#define SNDRV_DMA_TYPE_DEV 2		/* generic device memory */
#ifdef CONFIG_SND_DMA_SGBUF
#define SNDRV_DMA_TYPE_DEV_SG 3		/* scatter-gather pages */
#else
#define SNDRV_DMA_TYPE_DEV_SG SNDRV_DMA_TYPE_DEV /* no SG backend: fall back */
#endif
#ifdef CONFIG_GENERIC_ALLOCATOR
#define SNDRV_DMA_TYPE_DEV_IRAM 4	/* IRAM via the generic allocator */
#else
#define SNDRV_DMA_TYPE_DEV_IRAM SNDRV_DMA_TYPE_DEV /* no genalloc: fall back */
#endif
60
61
62
63
/*
 * An allocated DMA buffer: owning device/type plus the CPU mapping,
 * DMA address and size.
 */
struct snd_dma_buffer {
	struct snd_dma_device dev;	/* device and allocation type */
	unsigned char *area;		/* CPU-visible (virtual) address */
	dma_addr_t addr;		/* DMA (bus) address */
	size_t bytes;			/* buffer size in bytes */
	void *private_data;		/* allocator-private state; for SG
					 * buffers this holds the struct
					 * snd_sg_buf (see accessors below) */
};
71
72#ifdef CONFIG_SND_DMA_SGBUF
73
74
75
/*
 * Allocate a scatter-gather buffer of @size bytes for @device and attach
 * it to @dmab; *res_size, when non-NULL, presumably receives the size
 * actually obtained — TODO confirm in the implementation.
 * Returns the CPU mapping, or NULL on failure (inferred from the pointer
 * return type — NOTE(review): verify).
 */
void *snd_malloc_sgbuf_pages(struct device *device,
                             size_t size, struct snd_dma_buffer *dmab,
			     size_t *res_size);
/* Release a buffer previously set up by snd_malloc_sgbuf_pages(). */
int snd_free_sgbuf_pages(struct snd_dma_buffer *dmab);
80
/* One page of a scatter-gather buffer */
struct snd_sg_page {
	void *buf;		/* CPU address of the page */
	dma_addr_t addr;	/* DMA address of the page */
};
85
/*
 * Scatter-gather buffer bookkeeping; stored in snd_dma_buffer.private_data
 * (see snd_sgbuf_get_addr()/snd_sgbuf_get_ptr()).
 */
struct snd_sg_buf {
	int size;			/* buffer size in bytes */
	int pages;			/* number of pages */
	int tblsize;			/* allocated size of @table —
					 * NOTE(review): confirm units
					 * (entries vs bytes) */
	struct snd_sg_page *table;	/* per-page CPU/DMA addresses */
	struct page **page_table;	/* backing page structs */
	struct device *dev;		/* device the buffer belongs to */
};
94
95
96
97
98static inline unsigned int snd_sgbuf_aligned_pages(size_t size)
99{
100 return (size + PAGE_SIZE - 1) >> PAGE_SHIFT;
101}
102
103
104
105
106static inline dma_addr_t snd_sgbuf_get_addr(struct snd_dma_buffer *dmab,
107 size_t offset)
108{
109 struct snd_sg_buf *sgbuf = dmab->private_data;
110 dma_addr_t addr = sgbuf->table[offset >> PAGE_SHIFT].addr;
111 addr &= ~((dma_addr_t)PAGE_SIZE - 1);
112 return addr + offset % PAGE_SIZE;
113}
114
115
116
117
118static inline void *snd_sgbuf_get_ptr(struct snd_dma_buffer *dmab,
119 size_t offset)
120{
121 struct snd_sg_buf *sgbuf = dmab->private_data;
122 return sgbuf->table[offset >> PAGE_SHIFT].buf + offset % PAGE_SIZE;
123}
124
/*
 * Size of the physically contiguous run starting at byte offset @ofs,
 * capped at @size — NOTE(review): semantics inferred from the non-SG
 * fallback macro, which returns @size whole; confirm in the
 * implementation.
 */
unsigned int snd_sgbuf_get_chunk_size(struct snd_dma_buffer *dmab,
				      unsigned int ofs, unsigned int size);
127#else
128
129static inline dma_addr_t snd_sgbuf_get_addr(struct snd_dma_buffer *dmab,
130 size_t offset)
131{
132 return dmab->addr + offset;
133}
134
135static inline void *snd_sgbuf_get_ptr(struct snd_dma_buffer *dmab,
136 size_t offset)
137{
138 return dmab->area + offset;
139}
140
141#define snd_sgbuf_get_chunk_size(dmab, ofs, size) (size)
142
143#endif
144
145
/*
 * Allocate a buffer of the given SNDRV_DMA_TYPE_* for @dev into @dmab.
 * Returns 0 on success or a negative error code — NOTE(review): return
 * convention inferred from the int type; confirm in the implementation.
 */
int snd_dma_alloc_pages(int type, struct device *dev, size_t size,
			struct snd_dma_buffer *dmab);
/*
 * As snd_dma_alloc_pages(); the "fallback" variant presumably retries
 * with reduced sizes when the full allocation fails — TODO confirm.
 */
int snd_dma_alloc_pages_fallback(int type, struct device *dev, size_t size,
				 struct snd_dma_buffer *dmab);
/* Release a buffer obtained from either allocator above. */
void snd_dma_free_pages(struct snd_dma_buffer *dmab);

/* Raw page helpers, not tied to a struct snd_dma_buffer. */
void *snd_malloc_pages(size_t size, gfp_t gfp_flags);
void snd_free_pages(void *ptr, size_t size);
155
156#endif
157
158