/*
 * Generic dmaengine based PCM helpers for ALSA and ASoC drivers.
 *
 * This file implements the common glue between the ALSA PCM core and the
 * DMA engine framework: cyclic transfer setup, trigger handling, buffer
 * position reporting and DMA channel management.
 */

#include <linux/module.h>
#include <linux/init.h>
#include <linux/dmaengine.h>
#include <linux/slab.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include <sound/dmaengine_pcm.h>

struct dmaengine_pcm_runtime_data {
	struct dma_chan *dma_chan;
	dma_cookie_t cookie;

	unsigned int pos;
};

static inline struct dmaengine_pcm_runtime_data *substream_to_prtd(
	const struct snd_pcm_substream *substream)
{
	return substream->runtime->private_data;
}

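/**
 * snd_dmaengine_pcm_get_chan - Get the DMA channel used by the PCM substream
 * @substream: PCM substream
 *
 * Return: The DMA channel that was associated with the substream when it was
 * opened with snd_dmaengine_pcm_open().
 */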
struct dma_chan *snd_dmaengine_pcm_get_chan(struct snd_pcm_substream *substream)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);

	return prtd->dma_chan;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_get_chan);

/**
 * snd_hwparams_to_dma_slave_config - Convert hw_params to dma_slave_config
 * @substream: PCM substream
 * @params: hw_params
 * @slave_config: DMA slave config
 *
 * This function can be used to initialize a dma_slave_config from the
 * substream and hw_params in a dmaengine based PCM driver implementation.
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_hwparams_to_dma_slave_config(const struct snd_pcm_substream *substream,
	const struct snd_pcm_hw_params *params,
	struct dma_slave_config *slave_config)
{
	enum dma_slave_buswidth buswidth;
	int bits;

	bits = params_physical_width(params);
	if (bits < 8 || bits > 64)
		return -EINVAL;
	else if (bits == 8)
		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
	else if (bits == 16)
		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
	else if (bits == 24)
		buswidth = DMA_SLAVE_BUSWIDTH_3_BYTES;
	else if (bits <= 32)
		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
	else
		buswidth = DMA_SLAVE_BUSWIDTH_8_BYTES;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		slave_config->direction = DMA_MEM_TO_DEV;
		slave_config->dst_addr_width = buswidth;
	} else {
		slave_config->direction = DMA_DEV_TO_MEM;
		slave_config->src_addr_width = buswidth;
	}

	slave_config->device_fc = false;

	return 0;
}
EXPORT_SYMBOL_GPL(snd_hwparams_to_dma_slave_config);
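
/*
 * Illustrative usage sketch: a dmaengine based PCM driver would typically
 * call this helper together with snd_dmaengine_pcm_set_config_from_dai_data()
 * below from its hw_params callback and then program the channel.
 * foo_pcm_hw_params and foo_get_dai_dma_data are placeholder names; the DAI
 * DMA data is assumed to be provided by the CPU DAI driver.
 *
 *	static int foo_pcm_hw_params(struct snd_pcm_substream *substream,
 *				     struct snd_pcm_hw_params *params)
 *	{
 *		struct dma_chan *chan = snd_dmaengine_pcm_get_chan(substream);
 *		struct snd_dmaengine_dai_dma_data *dma_data =
 *			foo_get_dai_dma_data(substream);
 *		struct dma_slave_config config = { };
 *		int ret;
 *
 *		ret = snd_hwparams_to_dma_slave_config(substream, params,
 *						       &config);
 *		if (ret)
 *			return ret;
 *
 *		snd_dmaengine_pcm_set_config_from_dai_data(substream, dma_data,
 *							   &config);
 *
 *		return dmaengine_slave_config(chan, &config);
 *	}
 */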

/**
 * snd_dmaengine_pcm_set_config_from_dai_data - Initialize a dma_slave_config
 *	from DAI DMA data
 * @substream: PCM substream
 * @dma_data: DAI DMA data
 * @slave_config: DMA slave configuration
 *
 * Initializes the {dst,src}_addr, {dst,src}_maxburst and {dst,src}_addr_width
 * fields of the DMA slave config from the corresponding fields of the DAI DMA
 * data struct. For a playback stream the dst fields are initialized, for a
 * capture stream the src fields. The {dst,src}_addr_width field is only
 * overwritten if the SND_DMAENGINE_PCM_DAI_FLAG_PACK flag is set or if the
 * addr_width field of the DAI DMA data struct is not
 * DMA_SLAVE_BUSWIDTH_UNDEFINED; if both apply, the DAI provided addr_width
 * takes precedence.
 */
void snd_dmaengine_pcm_set_config_from_dai_data(
	const struct snd_pcm_substream *substream,
	const struct snd_dmaengine_dai_dma_data *dma_data,
	struct dma_slave_config *slave_config)
{
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		slave_config->dst_addr = dma_data->addr;
		slave_config->dst_maxburst = dma_data->maxburst;
		if (dma_data->flags & SND_DMAENGINE_PCM_DAI_FLAG_PACK)
			slave_config->dst_addr_width =
				DMA_SLAVE_BUSWIDTH_UNDEFINED;
		if (dma_data->addr_width != DMA_SLAVE_BUSWIDTH_UNDEFINED)
			slave_config->dst_addr_width = dma_data->addr_width;
	} else {
		slave_config->src_addr = dma_data->addr;
		slave_config->src_maxburst = dma_data->maxburst;
		if (dma_data->flags & SND_DMAENGINE_PCM_DAI_FLAG_PACK)
			slave_config->src_addr_width =
				DMA_SLAVE_BUSWIDTH_UNDEFINED;
		if (dma_data->addr_width != DMA_SLAVE_BUSWIDTH_UNDEFINED)
			slave_config->src_addr_width = dma_data->addr_width;
	}

	slave_config->slave_id = dma_data->slave_id;
	slave_config->peripheral_config = dma_data->peripheral_config;
	slave_config->peripheral_size = dma_data->peripheral_size;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_set_config_from_dai_data);

static void dmaengine_pcm_dma_complete(void *arg)
{
	struct snd_pcm_substream *substream = arg;
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);

	prtd->pos += snd_pcm_lib_period_bytes(substream);
	if (prtd->pos >= snd_pcm_lib_buffer_bytes(substream))
		prtd->pos = 0;

	snd_pcm_period_elapsed(substream);
}

static int dmaengine_pcm_prepare_and_submit(struct snd_pcm_substream *substream)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);
	struct dma_chan *chan = prtd->dma_chan;
	struct dma_async_tx_descriptor *desc;
	enum dma_transfer_direction direction;
	unsigned long flags = DMA_CTRL_ACK;

	direction = snd_pcm_substream_to_dma_direction(substream);

	if (!substream->runtime->no_period_wakeup)
		flags |= DMA_PREP_INTERRUPT;

	prtd->pos = 0;
	desc = dmaengine_prep_dma_cyclic(chan,
		substream->runtime->dma_addr,
		snd_pcm_lib_buffer_bytes(substream),
		snd_pcm_lib_period_bytes(substream), direction, flags);

	if (!desc)
		return -ENOMEM;

	desc->callback = dmaengine_pcm_dma_complete;
	desc->callback_param = substream;
	prtd->cookie = dmaengine_submit(desc);

	return 0;
}

/**
 * snd_dmaengine_pcm_trigger - dmaengine based PCM trigger implementation
 * @substream: PCM substream
 * @cmd: Trigger command
 *
 * This function can be used as the PCM trigger callback for dmaengine based
 * PCM driver implementations.
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_dmaengine_pcm_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	int ret;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		ret = dmaengine_pcm_prepare_and_submit(substream);
		if (ret)
			return ret;
		dma_async_issue_pending(prtd->dma_chan);
		break;
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		dmaengine_resume(prtd->dma_chan);
		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
		if (runtime->info & SNDRV_PCM_INFO_PAUSE)
			dmaengine_pause(prtd->dma_chan);
		else
			dmaengine_terminate_async(prtd->dma_chan);
		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		dmaengine_pause(prtd->dma_chan);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
		dmaengine_terminate_async(prtd->dma_chan);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_trigger);

/**
 * snd_dmaengine_pcm_pointer_no_residue - dmaengine based PCM pointer implementation
 * @substream: PCM substream
 *
 * This function reports the position from the number of elapsed periods and
 * is intended for DMA drivers that cannot report the transfer residue; its
 * granularity is therefore one period.
 *
 * Return: Current PCM position in frames
 */
snd_pcm_uframes_t snd_dmaengine_pcm_pointer_no_residue(struct snd_pcm_substream *substream)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);

	return bytes_to_frames(substream->runtime, prtd->pos);
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_pointer_no_residue);

/**
 * snd_dmaengine_pcm_pointer - dmaengine based PCM pointer implementation
 * @substream: PCM substream
 *
 * This function can be used as the PCM pointer callback for dmaengine based
 * PCM driver implementations. It derives the position from the transfer
 * residue reported by the DMA driver.
 *
 * Return: Current PCM position in frames
 */
snd_pcm_uframes_t snd_dmaengine_pcm_pointer(struct snd_pcm_substream *substream)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct dma_tx_state state;
	enum dma_status status;
	unsigned int buf_size;
	unsigned int pos = 0;

	status = dmaengine_tx_status(prtd->dma_chan, prtd->cookie, &state);
	if (status == DMA_IN_PROGRESS || status == DMA_PAUSED) {
		buf_size = snd_pcm_lib_buffer_bytes(substream);
		if (state.residue > 0 && state.residue <= buf_size)
			pos = buf_size - state.residue;

		runtime->delay = bytes_to_frames(runtime,
						 state.in_flight_bytes);
	}

	return bytes_to_frames(runtime, pos);
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_pointer);
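
/*
 * Illustrative sketch: the trigger and pointer helpers plug directly into a
 * conventional PCM ops table. The foo_* callback names are placeholders;
 * whether snd_dmaengine_pcm_pointer() or the period granular
 * snd_dmaengine_pcm_pointer_no_residue() is used depends on the DMA driver's
 * residue reporting.
 *
 *	static const struct snd_pcm_ops foo_pcm_ops = {
 *		.open		= foo_pcm_open,
 *		.close		= snd_dmaengine_pcm_close_release_chan,
 *		.hw_params	= foo_pcm_hw_params,
 *		.trigger	= snd_dmaengine_pcm_trigger,
 *		.pointer	= snd_dmaengine_pcm_pointer,
 *	};
 */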

/**
 * snd_dmaengine_pcm_request_channel - Request channel for the dmaengine PCM
 * @filter_fn: Filter function used to request the DMA channel
 * @filter_data: Data passed to the DMA filter function
 *
 * Requests a DMA channel that is capable of cyclic slave transfers for use
 * with the dmaengine PCM helpers.
 *
 * Return: The requested DMA channel or NULL if none could be found
 */
struct dma_chan *snd_dmaengine_pcm_request_channel(dma_filter_fn filter_fn,
	void *filter_data)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);
	dma_cap_set(DMA_CYCLIC, mask);

	return dma_request_channel(mask, filter_fn, filter_data);
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_request_channel);

/**
 * snd_dmaengine_pcm_open - Open a dmaengine based PCM substream
 * @substream: PCM substream
 * @chan: DMA channel to use for data transfers
 *
 * The function should usually be called from the pcm open callback. Note that
 * this function uses the private_data field of the substream's runtime, so it
 * is not available to the PCM driver implementation.
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_dmaengine_pcm_open(struct snd_pcm_substream *substream,
	struct dma_chan *chan)
{
	struct dmaengine_pcm_runtime_data *prtd;
	int ret;

	if (!chan)
		return -ENXIO;

	ret = snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0)
		return ret;

	prtd = kzalloc(sizeof(*prtd), GFP_KERNEL);
	if (!prtd)
		return -ENOMEM;

	prtd->dma_chan = chan;

	substream->runtime->private_data = prtd;

	return 0;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_open);

/**
 * snd_dmaengine_pcm_open_request_chan - Open a dmaengine based PCM substream
 *	and request channel
 * @substream: PCM substream
 * @filter_fn: Filter function used to request the DMA channel
 * @filter_data: Data passed to the DMA filter function
 *
 * This function requests a DMA channel using the passed filter function and
 * data and then opens the substream on that channel. It should usually be
 * called from the pcm open callback. Note that this function uses the
 * private_data field of the substream's runtime, so it is not available to
 * the PCM driver implementation.
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_dmaengine_pcm_open_request_chan(struct snd_pcm_substream *substream,
	dma_filter_fn filter_fn, void *filter_data)
{
	return snd_dmaengine_pcm_open(substream,
		snd_dmaengine_pcm_request_channel(filter_fn, filter_data));
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_open_request_chan);
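
/*
 * Illustrative sketch: drivers that request the channel with a filter
 * function typically pair this helper with
 * snd_dmaengine_pcm_close_release_chan() below, so the channel is released
 * again when the substream is closed. foo_dma_filter and foo_filter_data are
 * placeholders.
 *
 *	static int foo_pcm_open(struct snd_pcm_substream *substream)
 *	{
 *		return snd_dmaengine_pcm_open_request_chan(substream,
 *				foo_dma_filter, foo_filter_data(substream));
 *	}
 */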

/**
 * snd_dmaengine_pcm_close - Close a dmaengine based PCM substream
 * @substream: PCM substream
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_dmaengine_pcm_close(struct snd_pcm_substream *substream)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);

	dmaengine_synchronize(prtd->dma_chan);
	kfree(prtd);

	return 0;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_close);

/**
 * snd_dmaengine_pcm_close_release_chan - Close a dmaengine based PCM
 *	substream and release channel
 * @substream: PCM substream
 *
 * Releases the DMA channel associated with the PCM substream.
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_dmaengine_pcm_close_release_chan(struct snd_pcm_substream *substream)
{
	struct dmaengine_pcm_runtime_data *prtd = substream_to_prtd(substream);

	dmaengine_synchronize(prtd->dma_chan);
	dma_release_channel(prtd->dma_chan);
	kfree(prtd);

	return 0;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_close_release_chan);

/**
 * snd_dmaengine_pcm_refine_runtime_hwparams - Refine runtime hw params
 * @substream: PCM substream
 * @dma_data: DAI DMA data
 * @hw: PCM hw params
 * @chan: DMA channel to use for data transfers
 *
 * This function queries the DMA slave capabilities of the channel and refines
 * the PCM hardware parameters (pause/resume support, batch reporting and the
 * set of supported sample formats) accordingly.
 *
 * Return: 0 on success, a negative error code otherwise
 */
int snd_dmaengine_pcm_refine_runtime_hwparams(
	struct snd_pcm_substream *substream,
	struct snd_dmaengine_dai_dma_data *dma_data,
	struct snd_pcm_hardware *hw,
	struct dma_chan *chan)
{
	struct dma_slave_caps dma_caps;
	u32 addr_widths = BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) |
			  BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) |
			  BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
	snd_pcm_format_t i;
	int ret = 0;

	if (!hw || !chan || !dma_data)
		return -EINVAL;

	ret = dma_get_slave_caps(chan, &dma_caps);
	if (ret == 0) {
		if (dma_caps.cmd_pause && dma_caps.cmd_resume)
			hw->info |= SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME;
		if (dma_caps.residue_granularity <= DMA_RESIDUE_GRANULARITY_SEGMENT)
			hw->info |= SNDRV_PCM_INFO_BATCH;

		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			addr_widths = dma_caps.dst_addr_widths;
		else
			addr_widths = dma_caps.src_addr_widths;
	}

	/*
	 * If SND_DMAENGINE_PCM_DAI_FLAG_PACK is set, keep hw->formats at 0 so
	 * that no restrictions are in place. In this case it is the
	 * responsibility of the DAI driver to provide the supported format
	 * information.
	 */
	if (!(dma_data->flags & SND_DMAENGINE_PCM_DAI_FLAG_PACK))
		/*
		 * Prepare the mask of valid/allowed sample formats. If the DMA
		 * controller does not support a given physical word size, the
		 * corresponding formats need to be masked out so user space
		 * cannot pick a format that would produce corrupted audio.
		 * If the DMA driver does not implement the slave capability
		 * query, the default assumption is that it supports 1, 2 and
		 * 4 byte widths.
		 */
		pcm_for_each_format(i) {
			int bits = snd_pcm_format_physical_width(i);

			/*
			 * Enable only sample formats whose physical width is
			 * supported by the DMA controller.
			 */
			switch (bits) {
			case 8:
			case 16:
			case 24:
			case 32:
			case 64:
				if (addr_widths & (1 << (bits / 8)))
					hw->formats |= pcm_format_to_bits(i);
				break;
			default:
				/* Unsupported types */
				break;
			}
		}

	return ret;
}
EXPORT_SYMBOL_GPL(snd_dmaengine_pcm_refine_runtime_hwparams);
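
/*
 * Illustrative sketch: a driver would typically call this from its startup
 * callback to trim a static snd_pcm_hardware template according to the DMA
 * controller's capabilities before installing it with
 * snd_soc_set_runtime_hwparams(). foo_pcm_hardware and foo_dai_dma_data()
 * are placeholders.
 *
 *	static int foo_pcm_startup(struct snd_pcm_substream *substream)
 *	{
 *		struct snd_pcm_hardware hw = foo_pcm_hardware;
 *		struct dma_chan *chan = snd_dmaengine_pcm_get_chan(substream);
 *		int ret;
 *
 *		ret = snd_dmaengine_pcm_refine_runtime_hwparams(substream,
 *				foo_dai_dma_data(substream), &hw, chan);
 *		if (ret)
 *			return ret;
 *
 *		return snd_soc_set_runtime_hwparams(substream, &hw);
 *	}
 */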

MODULE_LICENSE("GPL");