// SPDX-License-Identifier: GPL-2.0
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/dma/sprd-dma.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_reserved_mem.h>
#include <linux/platform_device.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include "sprd-pcm-dma.h"

#define SPRD_PCM_DMA_LINKLIST_SIZE	64
#define SPRD_PCM_DMA_BRUST_LEN		640

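/*
 * Per-channel DMA bookkeeping: the requested slave channel, its prepared
 * descriptor and submit cookie, the coherent buffer used for the DMA engine
 * link-list, and the byte offset seen at the previous pointer query.
 */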
struct sprd_pcm_dma_data {
	struct dma_chan *chan;
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	dma_addr_t phys;
	void *virt;
	int pre_pointer;
};

struct sprd_pcm_dma_private {
	struct snd_pcm_substream *substream;
	struct sprd_pcm_dma_params *params;
	struct sprd_pcm_dma_data data[SPRD_PCM_CHANNEL_MAX];
	int hw_chan;
	int dma_addr_offset;
};

static const struct snd_pcm_hardware sprd_pcm_hardware = {
	.info = SNDRV_PCM_INFO_MMAP | SNDRV_PCM_INFO_MMAP_VALID |
		SNDRV_PCM_INFO_INTERLEAVED | SNDRV_PCM_INFO_PAUSE |
		SNDRV_PCM_INFO_RESUME | SNDRV_PCM_INFO_NO_PERIOD_WAKEUP,
	.formats = SNDRV_PCM_FMTBIT_S16_LE | SNDRV_PCM_FMTBIT_S24_LE,
	.period_bytes_min = 1,
	.period_bytes_max = 64 * 1024,
	.periods_min = 1,
	.periods_max = PAGE_SIZE / SPRD_PCM_DMA_LINKLIST_SIZE,
	.buffer_bytes_max = 64 * 1024,
};

static int sprd_pcm_open(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	struct sprd_pcm_dma_private *dma_private;
	int hw_chan = SPRD_PCM_CHANNEL_MAX;
	int size, ret, i;

	snd_soc_set_runtime_hwparams(substream, &sprd_pcm_hardware);

	ret = snd_pcm_hw_constraint_step(runtime, 0,
					 SNDRV_PCM_HW_PARAM_PERIOD_BYTES,
					 SPRD_PCM_DMA_BRUST_LEN);
	if (ret < 0)
		return ret;

	ret = snd_pcm_hw_constraint_step(runtime, 0,
					 SNDRV_PCM_HW_PARAM_BUFFER_BYTES,
					 SPRD_PCM_DMA_BRUST_LEN);
	if (ret < 0)
		return ret;

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0)
		return ret;

	dma_private = devm_kzalloc(dev, sizeof(*dma_private), GFP_KERNEL);
	if (!dma_private)
		return -ENOMEM;

	size = runtime->hw.periods_max * SPRD_PCM_DMA_LINKLIST_SIZE;

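	/*
	 * Allocate one coherent buffer per hardware DMA channel to hold the
	 * DMA engine link-list nodes, sized for one node per period up to
	 * periods_max.
	 */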
	for (i = 0; i < hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		data->virt = dmam_alloc_coherent(dev, size, &data->phys,
						 GFP_KERNEL);
		if (!data->virt) {
			ret = -ENOMEM;
			goto error;
		}
	}

	dma_private->hw_chan = hw_chan;
	runtime->private_data = dma_private;
	dma_private->substream = substream;

	return 0;

error:
	for (i = 0; i < hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		if (data->virt)
			dmam_free_coherent(dev, size, data->virt, data->phys);
	}

	devm_kfree(dev, dma_private);
	return ret;
}

static int sprd_pcm_close(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	int size = runtime->hw.periods_max * SPRD_PCM_DMA_LINKLIST_SIZE;
	int i;

	for (i = 0; i < dma_private->hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		dmam_free_coherent(dev, size, data->virt, data->phys);
	}

	devm_kfree(dev, dma_private);

	return 0;
}

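/* DMA transfer completion callback: notify ALSA that a period has elapsed. */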
static void sprd_pcm_dma_complete(void *data)
{
	struct sprd_pcm_dma_private *dma_private = data;
	struct snd_pcm_substream *substream = dma_private->substream;

	snd_pcm_period_elapsed(substream);
}

static void sprd_pcm_release_dma_channel(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	int i;

	for (i = 0; i < SPRD_PCM_CHANNEL_MAX; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		if (data->chan) {
			dma_release_channel(data->chan);
			data->chan = NULL;
		}
	}
}

static int sprd_pcm_request_dma_channel(struct snd_pcm_substream *substream,
					int channels)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct device *dev = component->dev;
	struct sprd_pcm_dma_params *dma_params = dma_private->params;
	int i;

	if (channels > SPRD_PCM_CHANNEL_MAX) {
		dev_err(dev, "invalid dma channel number:%d\n", channels);
		return -EINVAL;
	}

	for (i = 0; i < channels; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		data->chan = dma_request_slave_channel(dev,
						       dma_params->chan_name[i]);
		if (!data->chan) {
			dev_err(dev, "failed to request dma channel:%s\n",
				dma_params->chan_name[i]);
			sprd_pcm_release_dma_channel(substream);
			return -ENODEV;
		}
	}

	return 0;
}

static int sprd_pcm_hw_params(struct snd_pcm_substream *substream,
			      struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	struct sprd_pcm_dma_params *dma_params;
	size_t totsize = params_buffer_bytes(params);
	size_t period = params_period_bytes(params);
	int channels = params_channels(params);
	int is_playback = substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
	struct scatterlist *sg;
	unsigned long flags;
	int ret, i, j, sg_num;

	dma_params = snd_soc_dai_get_dma_data(rtd->cpu_dai, substream);
	if (!dma_params) {
		dev_warn(component->dev, "no dma parameters setting\n");
		dma_private->params = NULL;
		snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
		runtime->dma_bytes = totsize;
		return 0;
	}

	if (!dma_private->params) {
		dma_private->params = dma_params;
		ret = sprd_pcm_request_dma_channel(substream, channels);
		if (ret)
			return ret;
	}

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);

	runtime->dma_bytes = totsize;
	sg_num = totsize / period;
	dma_private->dma_addr_offset = totsize / channels;

	sg = devm_kcalloc(component->dev, sg_num, sizeof(*sg), GFP_KERNEL);
	if (!sg) {
		ret = -ENOMEM;
		goto sg_err;
	}

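	/*
	 * Build one scatter-gather table per DMA channel: each period
	 * contributes one entry covering that channel's slice
	 * (period / channels bytes) of the runtime DMA buffer.
	 */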
	for (i = 0; i < channels; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];
		struct dma_chan *chan = data->chan;
		struct dma_slave_config config = { };
		struct sprd_dma_linklist link = { };
		enum dma_transfer_direction dir;
		struct scatterlist *sgt = sg;

		config.src_maxburst = dma_params->fragment_len[i];
		config.src_addr_width = dma_params->datawidth[i];
		config.dst_addr_width = dma_params->datawidth[i];
		if (is_playback) {
			config.src_addr = runtime->dma_addr +
				i * dma_private->dma_addr_offset;
			config.dst_addr = dma_params->dev_phys[i];
			dir = DMA_MEM_TO_DEV;
		} else {
			config.src_addr = dma_params->dev_phys[i];
			config.dst_addr = runtime->dma_addr +
				i * dma_private->dma_addr_offset;
			dir = DMA_DEV_TO_MEM;
		}

		sg_init_table(sgt, sg_num);
		for (j = 0; j < sg_num; j++, sgt++) {
			u32 sg_len = period / channels;

			sg_dma_len(sgt) = sg_len;
			sg_dma_address(sgt) = runtime->dma_addr +
				i * dma_private->dma_addr_offset + sg_len * j;
		}

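		/*
		 * Hand the per-channel coherent buffer to the Spreadtrum DMA
		 * engine so it can place its hardware link-list nodes there.
		 */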
		link.virt_addr = (unsigned long)data->virt;
		link.phy_addr = data->phys;

		ret = dmaengine_slave_config(chan, &config);
		if (ret) {
			dev_err(component->dev,
				"failed to set slave configuration: %d\n", ret);
			goto config_err;
		}

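		/*
		 * Spreadtrum-specific transfer flags: no channel chaining
		 * (CHN_MODE_NONE), no hardware trigger (NO_TRG), fragment
		 * request mode (FRAG_REQ) and an interrupt per completed
		 * transaction (TRANS_INT).
		 */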
		flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
				       SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
		data->desc = chan->device->device_prep_slave_sg(chan, sg,
								sg_num, dir,
								flags, &link);
		if (!data->desc) {
			dev_err(component->dev, "failed to prepare slave sg\n");
			ret = -ENOMEM;
			goto config_err;
		}

		if (!runtime->no_period_wakeup) {
			data->desc->callback = sprd_pcm_dma_complete;
			data->desc->callback_param = dma_private;
		}
	}

	devm_kfree(component->dev, sg);

	return 0;

config_err:
	devm_kfree(component->dev, sg);
sg_err:
	sprd_pcm_release_dma_channel(substream);
	return ret;
}

static int sprd_pcm_hw_free(struct snd_pcm_substream *substream)
{
	snd_pcm_set_runtime_buffer(substream, NULL);
	sprd_pcm_release_dma_channel(substream);

	return 0;
}

static int sprd_pcm_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct sprd_pcm_dma_private *dma_private =
		substream->runtime->private_data;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	int ret = 0, i;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (!data->desc)
				continue;

			data->cookie = dmaengine_submit(data->desc);
			ret = dma_submit_error(data->cookie);
			if (ret) {
				dev_err(component->dev,
					"failed to submit dma request: %d\n",
					ret);
				return ret;
			}

			dma_async_issue_pending(data->chan);
		}

		break;
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (data->chan)
				dmaengine_resume(data->chan);
		}

		break;
	case SNDRV_PCM_TRIGGER_STOP:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (data->chan)
				dmaengine_terminate_async(data->chan);
		}

		break;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		for (i = 0; i < dma_private->hw_chan; i++) {
			struct sprd_pcm_dma_data *data = &dma_private->data[i];

			if (data->chan)
				dmaengine_pause(data->chan);
		}

		break;
	default:
		ret = -EINVAL;
	}

	return ret;
}

static snd_pcm_uframes_t sprd_pcm_pointer(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *rtd = substream->private_data;
	struct sprd_pcm_dma_private *dma_private = runtime->private_data;
	struct snd_soc_component *component =
		snd_soc_rtdcom_lookup(rtd, DRV_NAME);
	int pointer[SPRD_PCM_CHANNEL_MAX];
	int bytes_of_pointer = 0, sel_max = 0, i;
	snd_pcm_uframes_t x;
	struct dma_tx_state state;
	enum dma_status status;

	for (i = 0; i < dma_private->hw_chan; i++) {
		struct sprd_pcm_dma_data *data = &dma_private->data[i];

		if (!data->chan)
			continue;

		status = dmaengine_tx_status(data->chan, data->cookie, &state);
		if (status == DMA_ERROR) {
			dev_err(component->dev,
				"failed to get dma channel %d status\n", i);
			return 0;
		}

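		/*
		 * This driver treats the residue reported by the Spreadtrum
		 * DMA engine as the current transfer address, so convert it
		 * into a byte offset within this channel's slice of the ring
		 * buffer.
		 */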
		pointer[i] = state.residue - runtime->dma_addr -
			i * dma_private->dma_addr_offset;

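		/*
		 * When two DMA channels are in use, each serves one half of
		 * the buffer; combine both per-channel offsets into a single
		 * byte position in the interleaved buffer (hence the
		 * doubling), taking the larger or smaller offset depending on
		 * whether one channel has already wrapped, as tracked by
		 * pre_pointer.
		 */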
		if (i == 0) {
			bytes_of_pointer = pointer[i];
			sel_max = pointer[i] < data->pre_pointer ? 1 : 0;
		} else {
			sel_max ^= pointer[i] < data->pre_pointer ? 1 : 0;

			if (sel_max)
				bytes_of_pointer =
					max(pointer[i], pointer[i - 1]) << 1;
			else
				bytes_of_pointer =
					min(pointer[i], pointer[i - 1]) << 1;
		}

		data->pre_pointer = pointer[i];
	}

	x = bytes_to_frames(runtime, bytes_of_pointer);
	if (x == runtime->buffer_size)
		x = 0;

	return x;
}

static int sprd_pcm_mmap(struct snd_pcm_substream *substream,
			 struct vm_area_struct *vma)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

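	/* Map the runtime DMA buffer into user space with write-combined attributes. */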
	vma->vm_page_prot = pgprot_writecombine(vma->vm_page_prot);
	return remap_pfn_range(vma, vma->vm_start,
			       runtime->dma_addr >> PAGE_SHIFT,
			       vma->vm_end - vma->vm_start,
			       vma->vm_page_prot);
}

static struct snd_pcm_ops sprd_pcm_ops = {
	.open = sprd_pcm_open,
	.close = sprd_pcm_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = sprd_pcm_hw_params,
	.hw_free = sprd_pcm_hw_free,
	.trigger = sprd_pcm_trigger,
	.pointer = sprd_pcm_pointer,
	.mmap = sprd_pcm_mmap,
};

static int sprd_pcm_new(struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	struct snd_pcm_substream *substream;
	int ret;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

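	/*
	 * Preallocate the playback and capture DMA buffers when the PCM is
	 * created; hw_params later points the runtime buffer at them.
	 */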
	substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
	if (substream) {
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, card->dev,
					  sprd_pcm_hardware.buffer_bytes_max,
					  &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev,
				"can't alloc playback dma buffer: %d\n", ret);
			return ret;
		}
	}

	substream = pcm->streams[SNDRV_PCM_STREAM_CAPTURE].substream;
	if (substream) {
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, card->dev,
					  sprd_pcm_hardware.buffer_bytes_max,
					  &substream->dma_buffer);
		if (ret) {
			dev_err(card->dev,
				"can't alloc capture dma buffer: %d\n", ret);
			substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
			if (substream)
				snd_dma_free_pages(&substream->dma_buffer);
			return ret;
		}
	}

	return 0;
}

static void sprd_pcm_free(struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	int i;

	for (i = 0; i < ARRAY_SIZE(pcm->streams); i++) {
		substream = pcm->streams[i].substream;
		if (substream) {
			snd_dma_free_pages(&substream->dma_buffer);
			substream->dma_buffer.area = NULL;
			substream->dma_buffer.addr = 0;
		}
	}
}

static const struct snd_soc_component_driver sprd_soc_component = {
	.name = DRV_NAME,
	.ops = &sprd_pcm_ops,
	.compr_ops = &sprd_platform_compr_ops,
	.pcm_new = sprd_pcm_new,
	.pcm_free = sprd_pcm_free,
};

static int sprd_soc_platform_probe(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	int ret;

	ret = of_reserved_mem_device_init_by_idx(&pdev->dev, np, 0);
	if (ret)
		dev_warn(&pdev->dev,
			 "no reserved DMA memory for audio platform device\n");

	ret = devm_snd_soc_register_component(&pdev->dev, &sprd_soc_component,
					      NULL, 0);
	if (ret)
		dev_err(&pdev->dev, "could not register platform:%d\n", ret);

	return ret;
}

static const struct of_device_id sprd_pcm_of_match[] = {
	{ .compatible = "sprd,pcm-platform", },
	{ },
};
MODULE_DEVICE_TABLE(of, sprd_pcm_of_match);

static struct platform_driver sprd_pcm_driver = {
	.driver = {
		.name = "sprd-pcm-audio",
		.of_match_table = sprd_pcm_of_match,
	},
	.probe = sprd_soc_platform_probe,
};

module_platform_driver(sprd_pcm_driver);

MODULE_DESCRIPTION("Spreadtrum ASoC PCM DMA");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:sprd-audio");