1
2
3
4
5
6
7
8
9
10
11
12#include <linux/slab.h>
13#include <linux/pci.h>
14#include <sound/core.h>
15#include <sound/pcm.h>
16#include <uapi/sound/skl-tplg-interface.h>
17#include "skl-sst-dsp.h"
18#include "cnl-sst-dsp.h"
19#include "skl-sst-ipc.h"
20#include "skl.h"
21#include "../common/sst-dsp.h"
22#include "../common/sst-dsp-priv.h"
23#include "skl-topology.h"
24
25static int skl_alloc_dma_buf(struct device *dev,
26 struct snd_dma_buffer *dmab, size_t size)
27{
28 struct hdac_bus *bus = dev_get_drvdata(dev);
29
30 if (!bus)
31 return -ENODEV;
32
33 return bus->io_ops->dma_alloc_pages(bus, SNDRV_DMA_TYPE_DEV, size, dmab);
34}
35
36static int skl_free_dma_buf(struct device *dev, struct snd_dma_buffer *dmab)
37{
38 struct hdac_bus *bus = dev_get_drvdata(dev);
39
40 if (!bus)
41 return -ENODEV;
42
43 bus->io_ops->dma_free_pages(bus, dmab);
44
45 return 0;
46}
47
48#define SKL_ASTATE_PARAM_ID 4
49
50void skl_dsp_set_astate_cfg(struct skl_sst *ctx, u32 cnt, void *data)
51{
52 struct skl_ipc_large_config_msg msg = {0};
53
54 msg.large_param_id = SKL_ASTATE_PARAM_ID;
55 msg.param_data_size = (cnt * sizeof(struct skl_astate_param) +
56 sizeof(cnt));
57
58 skl_ipc_set_large_config(&ctx->ipc, &msg, data);
59}
60
61#define NOTIFICATION_PARAM_ID 3
62#define NOTIFICATION_MASK 0xf
63
64
65void skl_dsp_enable_notification(struct skl_sst *ctx, bool enable)
66{
67 struct notification_mask mask;
68 struct skl_ipc_large_config_msg msg = {0};
69
70 mask.notify = NOTIFICATION_MASK;
71 mask.enable = enable;
72
73 msg.large_param_id = NOTIFICATION_PARAM_ID;
74 msg.param_data_size = sizeof(mask);
75
76 skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)&mask);
77}
78
79static int skl_dsp_setup_spib(struct device *dev, unsigned int size,
80 int stream_tag, int enable)
81{
82 struct hdac_bus *bus = dev_get_drvdata(dev);
83 struct hdac_stream *stream = snd_hdac_get_stream(bus,
84 SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
85 struct hdac_ext_stream *estream;
86
87 if (!stream)
88 return -EINVAL;
89
90 estream = stream_to_hdac_ext_stream(stream);
91
92 snd_hdac_ext_stream_spbcap_enable(bus, enable, stream->index);
93
94
95 snd_hdac_ext_stream_set_spib(bus, estream, size);
96
97 return 0;
98}
99
100static int skl_dsp_prepare(struct device *dev, unsigned int format,
101 unsigned int size, struct snd_dma_buffer *dmab)
102{
103 struct hdac_bus *bus = dev_get_drvdata(dev);
104 struct hdac_ext_stream *estream;
105 struct hdac_stream *stream;
106 struct snd_pcm_substream substream;
107 int ret;
108
109 if (!bus)
110 return -ENODEV;
111
112 memset(&substream, 0, sizeof(substream));
113 substream.stream = SNDRV_PCM_STREAM_PLAYBACK;
114
115 estream = snd_hdac_ext_stream_assign(bus, &substream,
116 HDAC_EXT_STREAM_TYPE_HOST);
117 if (!estream)
118 return -ENODEV;
119
120 stream = hdac_stream(estream);
121
122
123 ret = snd_hdac_dsp_prepare(stream, format, size, dmab);
124 if (ret < 0)
125 return ret;
126
127 skl_dsp_setup_spib(dev, size, stream->stream_tag, true);
128
129 return stream->stream_tag;
130}
131
132static int skl_dsp_trigger(struct device *dev, bool start, int stream_tag)
133{
134 struct hdac_bus *bus = dev_get_drvdata(dev);
135 struct hdac_stream *stream;
136
137 if (!bus)
138 return -ENODEV;
139
140 stream = snd_hdac_get_stream(bus,
141 SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
142 if (!stream)
143 return -EINVAL;
144
145 snd_hdac_dsp_trigger(stream, start);
146
147 return 0;
148}
149
150static int skl_dsp_cleanup(struct device *dev,
151 struct snd_dma_buffer *dmab, int stream_tag)
152{
153 struct hdac_bus *bus = dev_get_drvdata(dev);
154 struct hdac_stream *stream;
155 struct hdac_ext_stream *estream;
156
157 if (!bus)
158 return -ENODEV;
159
160 stream = snd_hdac_get_stream(bus,
161 SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
162 if (!stream)
163 return -EINVAL;
164
165 estream = stream_to_hdac_ext_stream(stream);
166 skl_dsp_setup_spib(dev, 0, stream_tag, false);
167 snd_hdac_ext_stream_release(estream, HDAC_EXT_STREAM_TYPE_HOST);
168
169 snd_hdac_dsp_cleanup(stream, dmab);
170
171 return 0;
172}
173
174static struct skl_dsp_loader_ops skl_get_loader_ops(void)
175{
176 struct skl_dsp_loader_ops loader_ops;
177
178 memset(&loader_ops, 0, sizeof(struct skl_dsp_loader_ops));
179
180 loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
181 loader_ops.free_dma_buf = skl_free_dma_buf;
182
183 return loader_ops;
184};
185
186static struct skl_dsp_loader_ops bxt_get_loader_ops(void)
187{
188 struct skl_dsp_loader_ops loader_ops;
189
190 memset(&loader_ops, 0, sizeof(loader_ops));
191
192 loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
193 loader_ops.free_dma_buf = skl_free_dma_buf;
194 loader_ops.prepare = skl_dsp_prepare;
195 loader_ops.trigger = skl_dsp_trigger;
196 loader_ops.cleanup = skl_dsp_cleanup;
197
198 return loader_ops;
199};
200
/*
 * DSP ops table, indexed by the audio controller PCI device id.
 * Entries wired to skl_* hooks are Skylake-class, bxt_* hooks are
 * Broxton-class and cnl_* hooks are Cannonlake-class platforms;
 * num_cores is the DSP core count of that platform.
 */
static const struct skl_dsp_ops dsp_ops[] = {
	{
		.id = 0x9d70,		/* Skylake-class (skl_* hooks) */
		.num_cores = 2,
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x9d71,		/* Skylake-class (skl_* hooks) */
		.num_cores = 2,
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x5a98,		/* Broxton-class (bxt_* hooks) */
		.num_cores = 2,
		.loader_ops = bxt_get_loader_ops,
		.init = bxt_sst_dsp_init,
		.init_fw = bxt_sst_init_fw,
		.cleanup = bxt_sst_dsp_cleanup
	},
	{
		.id = 0x3198,		/* Broxton-class (bxt_* hooks) */
		.num_cores = 2,
		.loader_ops = bxt_get_loader_ops,
		.init = bxt_sst_dsp_init,
		.init_fw = bxt_sst_init_fw,
		.cleanup = bxt_sst_dsp_cleanup
	},
	{
		.id = 0x9dc8,		/* Cannonlake-class (cnl_* hooks) */
		.num_cores = 4,
		.loader_ops = bxt_get_loader_ops,
		.init = cnl_sst_dsp_init,
		.init_fw = cnl_sst_init_fw,
		.cleanup = cnl_sst_dsp_cleanup
	},
	{
		.id = 0xa348,		/* Cannonlake-class (cnl_* hooks) */
		.num_cores = 4,
		.loader_ops = bxt_get_loader_ops,
		.init = cnl_sst_dsp_init,
		.init_fw = cnl_sst_init_fw,
		.cleanup = cnl_sst_dsp_cleanup
	},
	{
		.id = 0x02c8,		/* Cannonlake-class (cnl_* hooks) */
		.num_cores = 4,
		.loader_ops = bxt_get_loader_ops,
		.init = cnl_sst_dsp_init,
		.init_fw = cnl_sst_init_fw,
		.cleanup = cnl_sst_dsp_cleanup
	},
	{
		.id = 0x06c8,		/* Cannonlake-class (cnl_* hooks) */
		.num_cores = 4,
		.loader_ops = bxt_get_loader_ops,
		.init = cnl_sst_dsp_init,
		.init_fw = cnl_sst_init_fw,
		.cleanup = cnl_sst_dsp_cleanup
	},
};
267
268const struct skl_dsp_ops *skl_get_dsp_ops(int pci_id)
269{
270 int i;
271
272 for (i = 0; i < ARRAY_SIZE(dsp_ops); i++) {
273 if (dsp_ops[i].id == pci_id)
274 return &dsp_ops[i];
275 }
276
277 return NULL;
278}
279
/*
 * skl_init_dsp - probe and initialize the DSP for this platform
 * @skl: skl driver context
 *
 * Enables the PP capability and its interrupt, maps the DSP MMIO BAR,
 * looks up the per-PCI-id ops table, runs the platform init routine
 * and allocates the per-core state/usage tracking arrays.
 *
 * Return: 0 on success, negative error code otherwise.
 */
int skl_init_dsp(struct skl *skl)
{
	void __iomem *mmio_base;
	struct hdac_bus *bus = skl_to_bus(skl);
	struct skl_dsp_loader_ops loader_ops;
	int irq = bus->irq;
	const struct skl_dsp_ops *ops;
	struct skl_dsp_cores *cores;
	int ret;

	/* enable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_enable(bus, true);
	snd_hdac_ext_bus_ppcap_int_enable(bus, true);

	/* map the ADSP MMIO region (PCI BAR 4) */
	mmio_base = pci_ioremap_bar(skl->pci, 4);
	if (mmio_base == NULL) {
		dev_err(bus->dev, "ioremap error\n");
		return -ENXIO;
	}

	ops = skl_get_dsp_ops(skl->pci->device);
	if (!ops) {
		ret = -EIO;
		goto unmap_mmio;
	}

	loader_ops = ops->loader_ops();
	/* on success this populates skl->skl_sst */
	ret = ops->init(bus->dev, mmio_base, irq,
				skl->fw_name, loader_ops,
				&skl->skl_sst);

	if (ret < 0)
		goto unmap_mmio;

	skl->skl_sst->dsp_ops = ops;
	cores = &skl->skl_sst->cores;
	cores->count = ops->num_cores;

	cores->state = kcalloc(cores->count, sizeof(*cores->state), GFP_KERNEL);
	if (!cores->state) {
		ret = -ENOMEM;
		goto unmap_mmio;
	}

	cores->usage_count = kcalloc(cores->count, sizeof(*cores->usage_count),
				     GFP_KERNEL);
	if (!cores->usage_count) {
		ret = -ENOMEM;
		goto free_core_state;
	}

	dev_dbg(bus->dev, "dsp registration status=%d\n", ret);

	return 0;

free_core_state:
	kfree(cores->state);

unmap_mmio:
	iounmap(mmio_base);

	return ret;
}
344
/*
 * skl_free_dsp - undo skl_init_dsp()
 * @skl: skl driver context
 *
 * Disables the PP interrupt, runs the platform cleanup hook, frees
 * the per-core bookkeeping arrays and unmaps the DSP MMIO region.
 */
int skl_free_dsp(struct skl *skl)
{
	struct hdac_bus *bus = skl_to_bus(skl);
	struct skl_sst *ctx = skl->skl_sst;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(bus, false);

	ctx->dsp_ops->cleanup(bus->dev, ctx);

	kfree(ctx->cores.state);
	kfree(ctx->cores.usage_count);

	/* addr.lpe holds the mapping created by skl_init_dsp() */
	if (ctx->dsp->addr.lpe)
		iounmap(ctx->dsp->addr.lpe);

	return 0;
}
363
364
365
366
367
368
369
370
/*
 * skl_suspend_late_dsp - settle the delayed D0i3 work before suspend
 * @skl: skl driver context
 *
 * When suspend happens with supend_active set, the pending D0i3 work
 * is flushed so it completes first; otherwise it is cancelled.
 */
int skl_suspend_late_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	struct delayed_work *dwork;

	if (!ctx)
		return 0;

	dwork = &ctx->d0i3.work;

	/* work.func is only non-NULL once the work was initialized */
	if (dwork->work.func) {
		if (skl->supend_active)
			flush_delayed_work(dwork);
		else
			cancel_delayed_work_sync(dwork);
	}

	return 0;
}
390
391int skl_suspend_dsp(struct skl *skl)
392{
393 struct skl_sst *ctx = skl->skl_sst;
394 struct hdac_bus *bus = skl_to_bus(skl);
395 int ret;
396
397
398 if (!bus->ppcap)
399 return 0;
400
401 ret = skl_dsp_sleep(ctx->dsp);
402 if (ret < 0)
403 return ret;
404
405
406 snd_hdac_ext_bus_ppcap_int_enable(bus, false);
407 snd_hdac_ext_bus_ppcap_enable(bus, false);
408
409 return 0;
410}
411
/*
 * skl_resume_dsp - power the DSP back up on system resume
 * @skl: skl driver context
 *
 * Re-enables the PP capability and its interrupt, wakes the DSP and
 * restores the notification and A-state configuration.  Nothing more
 * is done before the first firmware boot has happened.
 *
 * Return: 0 on success or when nothing had to be done, negative
 * error code otherwise.
 */
int skl_resume_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	struct hdac_bus *bus = skl_to_bus(skl);
	int ret;

	/* if ppcap is not supported, nothing to resume */
	if (!bus->ppcap)
		return 0;

	/* enable ppcap and its interrupt */
	snd_hdac_ext_bus_ppcap_enable(bus, true);
	snd_hdac_ext_bus_ppcap_int_enable(bus, true);

	/* nothing to restore before the first firmware boot */
	if (skl->skl_sst->is_first_boot)
		return 0;

	/*
	 * Clock gating and power gating are disabled around the wake
	 * and re-enabled right after (NOTE(review): presumably needed
	 * for a reliable D0 transition -- confirm against HW docs).
	 */
	ctx->enable_miscbdcge(ctx->dev, false);
	ctx->clock_power_gating(ctx->dev, false);

	ret = skl_dsp_wake(ctx->dsp);
	ctx->enable_miscbdcge(ctx->dev, true);
	ctx->clock_power_gating(ctx->dev, true);
	if (ret < 0)
		return ret;

	skl_dsp_enable_notification(skl->skl_sst, false);

	/* restore the A-state table when the topology provided one */
	if (skl->cfg.astate_cfg != NULL) {
		skl_dsp_set_astate_cfg(skl->skl_sst, skl->cfg.astate_cfg->count,
					skl->cfg.astate_cfg);
	}
	return ret;
}
451
452enum skl_bitdepth skl_get_bit_depth(int params)
453{
454 switch (params) {
455 case 8:
456 return SKL_DEPTH_8BIT;
457
458 case 16:
459 return SKL_DEPTH_16BIT;
460
461 case 24:
462 return SKL_DEPTH_24BIT;
463
464 case 32:
465 return SKL_DEPTH_32BIT;
466
467 default:
468 return SKL_DEPTH_INVALID;
469
470 }
471}
472
473
474
475
476
477
478
/*
 * Fill the base module IPC config: the input audio format of pin 0
 * plus the module's resource figures from the topology.
 */
static void skl_set_base_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_cfg *base_cfg)
{
	struct skl_module *module = mconfig->module;
	struct skl_module_res *res = &module->resources[mconfig->res_idx];
	struct skl_module_iface *fmt = &module->formats[mconfig->fmt_idx];
	/* base config always describes the first input pin's format */
	struct skl_module_fmt *format = &fmt->inputs[0].fmt;

	base_cfg->audio_fmt.number_of_channels = format->channels;

	base_cfg->audio_fmt.s_freq = format->s_freq;
	base_cfg->audio_fmt.bit_depth = format->bit_depth;
	base_cfg->audio_fmt.valid_bit_depth = format->valid_bit_depth;
	base_cfg->audio_fmt.ch_cfg = format->ch_cfg;
	base_cfg->audio_fmt.sample_type = format->sample_type;

	dev_dbg(ctx->dev, "bit_depth=%x valid_bd=%x ch_config=%x\n",
			format->bit_depth, format->valid_bit_depth,
			format->ch_cfg);

	base_cfg->audio_fmt.channel_map = format->ch_map;

	base_cfg->audio_fmt.interleaving = format->interleaving_style;

	/* cps/ibs/obs/is_pages come straight from the topology resources */
	base_cfg->cps = res->cps;
	base_cfg->ibs = res->ibs;
	base_cfg->obs = res->obs;
	base_cfg->is_pages = res->is_pages;
}
509
510
511
512
513
514static void skl_copy_copier_caps(struct skl_module_cfg *mconfig,
515 struct skl_cpr_cfg *cpr_mconfig)
516{
517 if (mconfig->formats_config.caps_size == 0)
518 return;
519
520 memcpy(cpr_mconfig->gtw_cfg.config_data,
521 mconfig->formats_config.caps,
522 mconfig->formats_config.caps_size);
523
524 cpr_mconfig->gtw_cfg.config_length =
525 (mconfig->formats_config.caps_size) / 4;
526}
527
528#define SKL_NON_GATEWAY_CPR_NODE_ID 0xFFFFFFFF
529
530
531
532
/*
 * Compute the copier gateway node id: a DMA class (direction-aware,
 * chosen by device type and connection type) plus a virtual index
 * identifying the concrete DMA channel.  Non-gateway copiers get the
 * all-ones SKL_NON_GATEWAY_CPR_NODE_ID.
 */
static u32 skl_get_node_id(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	union skl_connector_node_id node_id = {0};
	union skl_ssp_dma_node ssp_node = {0};
	struct skl_pipe_params *params = mconfig->pipe->p_params;

	switch (mconfig->dev_type) {
	case SKL_DEVICE_BT:
		/* BT rides the I2S link classes; vindex packs the host
		 * DMA id with the vbus id in the upper bits
		 */
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id +
					(mconfig->vbus_id << 3);
		break;

	case SKL_DEVICE_I2S:
		/* vindex encodes the SSP instance and TDM time slot */
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		ssp_node.dma_node.time_slot_index = mconfig->time_slot;
		ssp_node.dma_node.i2s_instance = mconfig->vbus_id;
		node_id.node.vindex = ssp_node.val;
		break;

	case SKL_DEVICE_DMIC:
		/* DMIC is capture-only: always the input class */
		node_id.node.dma_type = SKL_DMA_DMIC_LINK_INPUT_CLASS;
		node_id.node.vindex = mconfig->vbus_id +
					 (mconfig->time_slot);
		break;

	case SKL_DEVICE_HDALINK:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_LINK_OUTPUT_CLASS :
			SKL_DMA_HDA_LINK_INPUT_CLASS;
		node_id.node.vindex = params->link_dma_id;
		break;

	case SKL_DEVICE_HDAHOST:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_HOST_OUTPUT_CLASS :
			SKL_DMA_HDA_HOST_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id;
		break;

	default:
		/* not a gateway copier */
		node_id.val = 0xFFFFFFFF;
		break;
	}

	return node_id.val;
}
589
/*
 * Fill the copier gateway config: node id, DMA buffer size and the
 * optional capability blob.  Non-gateway copiers only get a zeroed
 * feature mask.
 */
static void skl_setup_cpr_gateway_cfg(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	u32 dma_io_buf;
	struct skl_module_res *res;
	int res_idx = mconfig->res_idx;
	struct skl *skl = get_skl_ctx(ctx->dev);

	cpr_mconfig->gtw_cfg.node_id = skl_get_node_id(ctx, mconfig);

	if (cpr_mconfig->gtw_cfg.node_id == SKL_NON_GATEWAY_CPR_NODE_ID) {
		cpr_mconfig->cpr_feature_mask = 0;
		return;
	}

	if (skl->nr_modules) {
		/* topology v2: the resource table already carries the
		 * DMA buffer size, no calculation needed
		 */
		res = &mconfig->module->resources[mconfig->res_idx];
		cpr_mconfig->gtw_cfg.dma_buffer_size = res->dma_buffer_size;
		goto skip_buf_size_calc;
	} else {
		res = &mconfig->module->resources[res_idx];
	}

	/*
	 * pick ibs or obs as the per-unit size: the host-facing side
	 * of a HDA host copier is the opposite of the link direction
	 */
	switch (mconfig->hw_conn_type) {
	case SKL_CONN_SOURCE:
		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
			dma_io_buf =  res->ibs;
		else
			dma_io_buf =  res->obs;
		break;

	case SKL_CONN_SINK:
		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
			dma_io_buf =  res->obs;
		else
			dma_io_buf =  res->ibs;
		break;

	default:
		dev_warn(ctx->dev, "wrong connection type: %d\n",
				mconfig->hw_conn_type);
		return;
	}

	cpr_mconfig->gtw_cfg.dma_buffer_size =
				mconfig->dma_buffer_size * dma_io_buf;

	/* fallback: double the buffer size when nothing was configured */
	if (!cpr_mconfig->gtw_cfg.dma_buffer_size) {
		if (mconfig->hw_conn_type == SKL_CONN_SOURCE)
			cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * res->obs;
		else
			cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * res->ibs;
	}

skip_buf_size_calc:
	cpr_mconfig->cpr_feature_mask = 0;
	cpr_mconfig->gtw_cfg.config_length  = 0;

	skl_copy_copier_caps(mconfig, cpr_mconfig);
}
652
653#define DMA_CONTROL_ID 5
654#define DMA_I2S_BLOB_SIZE 21
655
/*
 * skl_dsp_set_dma_control - send a DMA control blob to firmware
 * @ctx: DSP context
 * @caps: blob data from the topology
 * @caps_size: blob size in bytes
 * @node_id: gateway node id the blob applies to
 *
 * Wraps @caps in a struct skl_dma_control keyed by @node_id and sends
 * it as a DMA_CONTROL large-config set.  A zero-sized blob is a no-op.
 *
 * Return: 0 on success or when there is nothing to send, negative
 * error code otherwise.
 */
int skl_dsp_set_dma_control(struct skl_sst *ctx, u32 *caps,
				u32 caps_size, u32 node_id)
{
	struct skl_dma_control *dma_ctrl;
	struct skl_ipc_large_config_msg msg = {0};
	int err = 0;

	/* nothing to send without a blob */
	if (caps_size == 0)
		return 0;

	msg.large_param_id = DMA_CONTROL_ID;
	msg.param_data_size = sizeof(struct skl_dma_control) + caps_size;

	dma_ctrl = kzalloc(msg.param_data_size, GFP_KERNEL);
	if (dma_ctrl == NULL)
		return -ENOMEM;

	dma_ctrl->node_id = node_id;

	/*
	 * config_length is fixed to the I2S blob size
	 * (NOTE(review): presumably only I2S blobs flow through this
	 * path -- confirm before reusing for other gateway types)
	 */
	dma_ctrl->config_length = DMA_I2S_BLOB_SIZE;

	memcpy(dma_ctrl->config_data, caps, caps_size);

	err = skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)dma_ctrl);

	kfree(dma_ctrl);
	return err;
}
694EXPORT_SYMBOL_GPL(skl_dsp_set_dma_control);
695
696static void skl_setup_out_format(struct skl_sst *ctx,
697 struct skl_module_cfg *mconfig,
698 struct skl_audio_data_format *out_fmt)
699{
700 struct skl_module *module = mconfig->module;
701 struct skl_module_iface *fmt = &module->formats[mconfig->fmt_idx];
702 struct skl_module_fmt *format = &fmt->outputs[0].fmt;
703
704 out_fmt->number_of_channels = (u8)format->channels;
705 out_fmt->s_freq = format->s_freq;
706 out_fmt->bit_depth = format->bit_depth;
707 out_fmt->valid_bit_depth = format->valid_bit_depth;
708 out_fmt->ch_cfg = format->ch_cfg;
709
710 out_fmt->channel_map = format->ch_map;
711 out_fmt->interleaving = format->interleaving_style;
712 out_fmt->sample_type = format->sample_type;
713
714 dev_dbg(ctx->dev, "copier out format chan=%d fre=%d bitdepth=%d\n",
715 out_fmt->number_of_channels, format->s_freq, format->bit_depth);
716}
717
718
719
720
721
722
723static void skl_set_src_format(struct skl_sst *ctx,
724 struct skl_module_cfg *mconfig,
725 struct skl_src_module_cfg *src_mconfig)
726{
727 struct skl_module *module = mconfig->module;
728 struct skl_module_iface *iface = &module->formats[mconfig->fmt_idx];
729 struct skl_module_fmt *fmt = &iface->outputs[0].fmt;
730
731 skl_set_base_module_format(ctx, mconfig,
732 (struct skl_base_cfg *)src_mconfig);
733
734 src_mconfig->src_cfg = fmt->s_freq;
735}
736
737
738
739
740
741
742static void skl_set_updown_mixer_format(struct skl_sst *ctx,
743 struct skl_module_cfg *mconfig,
744 struct skl_up_down_mixer_cfg *mixer_mconfig)
745{
746 struct skl_module *module = mconfig->module;
747 struct skl_module_iface *iface = &module->formats[mconfig->fmt_idx];
748 struct skl_module_fmt *fmt = &iface->outputs[0].fmt;
749
750 skl_set_base_module_format(ctx, mconfig,
751 (struct skl_base_cfg *)mixer_mconfig);
752 mixer_mconfig->out_ch_cfg = fmt->ch_cfg;
753 mixer_mconfig->ch_map = fmt->ch_map;
754}
755
756
757
758
759
760
761
762
763static void skl_set_copier_format(struct skl_sst *ctx,
764 struct skl_module_cfg *mconfig,
765 struct skl_cpr_cfg *cpr_mconfig)
766{
767 struct skl_audio_data_format *out_fmt = &cpr_mconfig->out_fmt;
768 struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)cpr_mconfig;
769
770 skl_set_base_module_format(ctx, mconfig, base_cfg);
771
772 skl_setup_out_format(ctx, mconfig, out_fmt);
773 skl_setup_cpr_gateway_cfg(ctx, mconfig, cpr_mconfig);
774}
775
776
777
778
779
780
781static void skl_set_algo_format(struct skl_sst *ctx,
782 struct skl_module_cfg *mconfig,
783 struct skl_algo_cfg *algo_mcfg)
784{
785 struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)algo_mcfg;
786
787 skl_set_base_module_format(ctx, mconfig, base_cfg);
788
789 if (mconfig->formats_config.caps_size == 0)
790 return;
791
792 memcpy(algo_mcfg->params,
793 mconfig->formats_config.caps,
794 mconfig->formats_config.caps_size);
795
796}
797
798
799
800
801
802
803
804
805static void skl_set_base_outfmt_format(struct skl_sst *ctx,
806 struct skl_module_cfg *mconfig,
807 struct skl_base_outfmt_cfg *base_outfmt_mcfg)
808{
809 struct skl_audio_data_format *out_fmt = &base_outfmt_mcfg->out_fmt;
810 struct skl_base_cfg *base_cfg =
811 (struct skl_base_cfg *)base_outfmt_mcfg;
812
813 skl_set_base_module_format(ctx, mconfig, base_cfg);
814 skl_setup_out_format(ctx, mconfig, out_fmt);
815}
816
817static u16 skl_get_module_param_size(struct skl_sst *ctx,
818 struct skl_module_cfg *mconfig)
819{
820 u16 param_size;
821
822 switch (mconfig->m_type) {
823 case SKL_MODULE_TYPE_COPIER:
824 param_size = sizeof(struct skl_cpr_cfg);
825 param_size += mconfig->formats_config.caps_size;
826 return param_size;
827
828 case SKL_MODULE_TYPE_SRCINT:
829 return sizeof(struct skl_src_module_cfg);
830
831 case SKL_MODULE_TYPE_UPDWMIX:
832 return sizeof(struct skl_up_down_mixer_cfg);
833
834 case SKL_MODULE_TYPE_ALGO:
835 param_size = sizeof(struct skl_base_cfg);
836 param_size += mconfig->formats_config.caps_size;
837 return param_size;
838
839 case SKL_MODULE_TYPE_BASE_OUTFMT:
840 case SKL_MODULE_TYPE_MIC_SELECT:
841 case SKL_MODULE_TYPE_KPB:
842 return sizeof(struct skl_base_outfmt_cfg);
843
844 default:
845
846
847
848
849 return sizeof(struct skl_base_cfg);
850 }
851
852 return 0;
853}
854
855
856
857
858
859
860
861
862static int skl_set_module_format(struct skl_sst *ctx,
863 struct skl_module_cfg *module_config,
864 u16 *module_config_size,
865 void **param_data)
866{
867 u16 param_size;
868
869 param_size = skl_get_module_param_size(ctx, module_config);
870
871 *param_data = kzalloc(param_size, GFP_KERNEL);
872 if (NULL == *param_data)
873 return -ENOMEM;
874
875 *module_config_size = param_size;
876
877 switch (module_config->m_type) {
878 case SKL_MODULE_TYPE_COPIER:
879 skl_set_copier_format(ctx, module_config, *param_data);
880 break;
881
882 case SKL_MODULE_TYPE_SRCINT:
883 skl_set_src_format(ctx, module_config, *param_data);
884 break;
885
886 case SKL_MODULE_TYPE_UPDWMIX:
887 skl_set_updown_mixer_format(ctx, module_config, *param_data);
888 break;
889
890 case SKL_MODULE_TYPE_ALGO:
891 skl_set_algo_format(ctx, module_config, *param_data);
892 break;
893
894 case SKL_MODULE_TYPE_BASE_OUTFMT:
895 case SKL_MODULE_TYPE_MIC_SELECT:
896 case SKL_MODULE_TYPE_KPB:
897 skl_set_base_outfmt_format(ctx, module_config, *param_data);
898 break;
899
900 default:
901 skl_set_base_module_format(ctx, module_config, *param_data);
902 break;
903
904 }
905
906 dev_dbg(ctx->dev, "Module type=%d config size: %d bytes\n",
907 module_config->id.module_id, param_size);
908 print_hex_dump_debug("Module params:", DUMP_PREFIX_OFFSET, 8, 4,
909 *param_data, param_size, false);
910 return 0;
911}
912
913static int skl_get_queue_index(struct skl_module_pin *mpin,
914 struct skl_module_inst_id id, int max)
915{
916 int i;
917
918 for (i = 0; i < max; i++) {
919 if (mpin[i].id.module_id == id.module_id &&
920 mpin[i].id.instance_id == id.instance_id)
921 return i;
922 }
923
924 return -EINVAL;
925}
926
927
928
929
930
931
/*
 * Allocate a pin queue on @mpin for binding to @tgt_cfg.
 *
 * Dynamic pins are first-come-first-served: any unused, unbound slot
 * is claimed and stamped with the target's ids.  Static pins are
 * pre-assigned by the topology, so only the slot already carrying the
 * target's module/instance id may be used, and only while unbound.
 *
 * Return: the queue index, or -EINVAL when no slot is available.
 */
static int skl_alloc_queue(struct skl_module_pin *mpin,
			struct skl_module_cfg *tgt_cfg, int max)
{
	int i;
	struct skl_module_inst_id id = tgt_cfg->id;

	for (i = 0; i < max; i++) {
		if (mpin[i].is_dynamic) {
			if (!mpin[i].in_use &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				/* claim the slot for this target */
				mpin[i].in_use = true;
				mpin[i].id.module_id = id.module_id;
				mpin[i].id.instance_id = id.instance_id;
				mpin[i].id.pvt_id = id.pvt_id;
				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		} else {
			if (mpin[i].id.module_id == id.module_id &&
				mpin[i].id.instance_id == id.instance_id &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		}
	}

	return -EINVAL;
}
968
969static void skl_free_queue(struct skl_module_pin *mpin, int q_index)
970{
971 if (mpin[q_index].is_dynamic) {
972 mpin[q_index].in_use = false;
973 mpin[q_index].id.module_id = 0;
974 mpin[q_index].id.instance_id = 0;
975 mpin[q_index].id.pvt_id = 0;
976 }
977 mpin[q_index].pin_state = SKL_PIN_UNBIND;
978 mpin[q_index].tgt_mcfg = NULL;
979}
980
981
982
983static void skl_clear_module_state(struct skl_module_pin *mpin, int max,
984 struct skl_module_cfg *mcfg)
985{
986 int i;
987 bool found = false;
988
989 for (i = 0; i < max; i++) {
990 if (mpin[i].pin_state == SKL_PIN_UNBIND)
991 continue;
992 found = true;
993 break;
994 }
995
996 if (!found)
997 mcfg->m_state = SKL_MODULE_INIT_DONE;
998 return;
999}
1000
1001
1002
1003
1004
1005
1006
/*
 * skl_init_module - create a module instance in firmware
 * @ctx: DSP context
 * @mconfig: module configuration from the topology
 *
 * Builds the type-specific payload and sends an init-instance IPC.
 * The enclosing pipe must already be in the CREATED state.
 *
 * Return: 0 on success, negative error code otherwise.
 */
int skl_init_module(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 module_config_size = 0;
	void *param_data = NULL;
	int ret;
	struct skl_ipc_init_instance_msg msg;

	dev_dbg(ctx->dev, "%s: module_id = %d instance=%d\n", __func__,
		 mconfig->id.module_id, mconfig->id.pvt_id);

	/* modules can only be created inside an already-created pipe */
	if (mconfig->pipe->state != SKL_PIPE_CREATED) {
		dev_err(ctx->dev, "Pipe not created state= %d pipe_id= %d\n",
				 mconfig->pipe->state, mconfig->pipe->ppl_id);
		return -EIO;
	}

	/* allocates param_data; freed below on all paths */
	ret = skl_set_module_format(ctx, mconfig,
			&module_config_size, &param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to set module format ret=%d\n", ret);
		return ret;
	}

	msg.module_id = mconfig->id.module_id;
	msg.instance_id = mconfig->id.pvt_id;
	msg.ppl_instance_id = mconfig->pipe->ppl_id;
	msg.param_data_size = module_config_size;
	msg.core_id = mconfig->core_id;
	msg.domain = mconfig->domain;

	ret = skl_ipc_init_instance(&ctx->ipc, &msg, param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to init instance ret=%d\n", ret);
		kfree(param_data);
		return ret;
	}
	mconfig->m_state = SKL_MODULE_INIT_DONE;
	kfree(param_data);
	return ret;
}
1048
/* Debug helper: log the ids and states of a bind/unbind pair. */
static void skl_dump_bind_info(struct skl_sst *ctx, struct skl_module_cfg
	*src_module, struct skl_module_cfg *dst_module)
{
	dev_dbg(ctx->dev, "%s: src module_id = %d  src_instance=%d\n",
		__func__, src_module->id.module_id, src_module->id.pvt_id);
	dev_dbg(ctx->dev, "%s: dst_module=%d dst_instance=%d\n", __func__,
		 dst_module->id.module_id, dst_module->id.pvt_id);

	dev_dbg(ctx->dev, "src_module state = %d dst module state = %d\n",
		src_module->m_state, dst_module->m_state);
}
1060
1061
1062
1063
1064
1065
/*
 * skl_unbind_modules - break the connection between two modules
 * @ctx: DSP context
 * @src_mcfg: source module
 * @dst_mcfg: destination module
 *
 * Looks up the queues carrying the connection, sends an unbind IPC
 * and on success frees both queues.  Returns 0 (a no-op) when the
 * modules were never bound to each other.
 */
int skl_unbind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	struct skl_module_inst_id src_id = src_mcfg->id;
	struct skl_module_inst_id dst_id = dst_mcfg->id;
	int in_max = dst_mcfg->module->max_input_pins;
	int out_max = src_mcfg->module->max_output_pins;
	int src_index, dst_index, src_pin_state, dst_pin_state;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* get src queue index */
	src_index = skl_get_queue_index(src_mcfg->m_out_pin, dst_id, out_max);
	if (src_index < 0)
		return 0;

	msg.src_queue = src_index;

	/* get dst queue index */
	dst_index  = skl_get_queue_index(dst_mcfg->m_in_pin, src_id, in_max);
	if (dst_index < 0)
		return 0;

	msg.dst_queue = dst_index;

	src_pin_state = src_mcfg->m_out_pin[src_index].pin_state;
	dst_pin_state = dst_mcfg->m_in_pin[dst_index].pin_state;

	/* nothing to unbind unless both pins completed a bind */
	if (src_pin_state != SKL_PIN_BIND_DONE ||
		dst_pin_state != SKL_PIN_BIND_DONE)
		return 0;

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = false;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);
	if (!ret) {
		/* free both queues and, if no pins remain bound on the
		 * source, drop it back to INIT_DONE
		 */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);

		skl_clear_module_state(src_mcfg->m_out_pin, out_max, src_mcfg);
	}

	return ret;
}
1122
1123static void fill_pin_params(struct skl_audio_data_format *pin_fmt,
1124 struct skl_module_fmt *format)
1125{
1126 pin_fmt->number_of_channels = format->channels;
1127 pin_fmt->s_freq = format->s_freq;
1128 pin_fmt->bit_depth = format->bit_depth;
1129 pin_fmt->valid_bit_depth = format->valid_bit_depth;
1130 pin_fmt->ch_cfg = format->ch_cfg;
1131 pin_fmt->sample_type = format->sample_type;
1132 pin_fmt->channel_map = format->ch_map;
1133 pin_fmt->interleaving = format->interleaving_style;
1134}
1135
1136#define CPR_SINK_FMT_PARAM_ID 2
1137
1138
1139
1140
1141
1142
1143
1144
/*
 * skl_bind_modules - connect two initialized modules
 * @ctx: DSP context
 * @src_mcfg: source module
 * @dst_mcfg: destination module
 *
 * Allocates an output queue on the source and an input queue on the
 * destination, optionally programs the copier sink format for
 * secondary copier outputs, then sends a bind IPC.  On failure the
 * allocated queues are rolled back.
 *
 * Return: 0 on success (or when either module is not yet initialized,
 * a no-op), negative error code otherwise.
 */
int skl_bind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret = 0;
	struct skl_ipc_bind_unbind_msg msg;
	int in_max = dst_mcfg->module->max_input_pins;
	int out_max = src_mcfg->module->max_output_pins;
	int src_index, dst_index;
	struct skl_module_fmt *format;
	struct skl_cpr_pin_fmt pin_fmt;
	struct skl_module *module;
	struct skl_module_iface *fmt;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* both ends must at least be init'ed in firmware */
	if (src_mcfg->m_state < SKL_MODULE_INIT_DONE ||
		dst_mcfg->m_state < SKL_MODULE_INIT_DONE)
		return 0;

	src_index = skl_alloc_queue(src_mcfg->m_out_pin, dst_mcfg, out_max);
	if (src_index < 0)
		return -EINVAL;

	msg.src_queue = src_index;
	dst_index = skl_alloc_queue(dst_mcfg->m_in_pin, src_mcfg, in_max);
	if (dst_index < 0) {
		/* roll back the source queue claimed above */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		return -EINVAL;
	}

	/*
	 * For a copier binding through a secondary output pin, tell
	 * firmware the sink pin format first (input pin 0 as source
	 * format, the chosen output pin as destination format).
	 */
	if (src_mcfg->m_type == SKL_MODULE_TYPE_COPIER && src_index > 0) {
		pin_fmt.sink_id = src_index;
		module = src_mcfg->module;
		fmt = &module->formats[src_mcfg->fmt_idx];

		/* Input fmt is same as that of src module input cfg */
		format = &fmt->inputs[0].fmt;
		fill_pin_params(&(pin_fmt.src_fmt), format);

		format = &fmt->outputs[src_index].fmt;
		fill_pin_params(&(pin_fmt.dst_fmt), format);
		ret = skl_set_module_params(ctx, (void *)&pin_fmt,
					sizeof(struct skl_cpr_pin_fmt),
					CPR_SINK_FMT_PARAM_ID, src_mcfg);

		if (ret < 0)
			goto out;
	}

	msg.dst_queue = dst_index;

	dev_dbg(ctx->dev, "src queue = %d dst queue =%d\n",
			 msg.src_queue, msg.dst_queue);

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = true;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);

	if (!ret) {
		src_mcfg->m_state = SKL_MODULE_BIND_DONE;
		src_mcfg->m_out_pin[src_index].pin_state = SKL_PIN_BIND_DONE;
		dst_mcfg->m_in_pin[dst_index].pin_state = SKL_PIN_BIND_DONE;
		return ret;
	}
out:
	/* error path: release both queues claimed above */
	skl_free_queue(src_mcfg->m_out_pin, src_index);
	skl_free_queue(dst_mcfg->m_in_pin, dst_index);

	return ret;
}
1225
1226static int skl_set_pipe_state(struct skl_sst *ctx, struct skl_pipe *pipe,
1227 enum skl_ipc_pipeline_state state)
1228{
1229 dev_dbg(ctx->dev, "%s: pipe_state = %d\n", __func__, state);
1230
1231 return skl_ipc_set_pipeline_state(&ctx->ipc, pipe->ppl_id, state);
1232}
1233
1234
1235
1236
1237
1238
1239
1240int skl_create_pipeline(struct skl_sst *ctx, struct skl_pipe *pipe)
1241{
1242 int ret;
1243
1244 dev_dbg(ctx->dev, "%s: pipe_id = %d\n", __func__, pipe->ppl_id);
1245
1246 ret = skl_ipc_create_pipeline(&ctx->ipc, pipe->memory_pages,
1247 pipe->pipe_priority, pipe->ppl_id,
1248 pipe->lp_mode);
1249 if (ret < 0) {
1250 dev_err(ctx->dev, "Failed to create pipeline\n");
1251 return ret;
1252 }
1253
1254 pipe->state = SKL_PIPE_CREATED;
1255
1256 return 0;
1257}
1258
1259
1260
1261
1262
1263
1264
/*
 * skl_delete_pipe - delete a pipeline from firmware
 * @ctx: DSP context
 * @pipe: pipe to delete
 *
 * Walks the pipe down through pause and reset before sending the
 * delete IPC.  A pipe that was never created is a no-op.
 *
 * Return: 0 on success, negative error code otherwise.
 */
int skl_delete_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If pipe was not created in FW, do not try to delete it */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	/* If pipe is started, do stop the pipe in FW. */
	if (pipe->state >= SKL_PIPE_STARTED) {
		ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
		if (ret < 0) {
			dev_err(ctx->dev, "Failed to stop pipeline\n");
			return ret;
		}

		pipe->state = SKL_PIPE_PAUSED;
	}

	/* reset the pipe state before deletion */
	ret = skl_set_pipe_state(ctx, pipe, PPL_RESET);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to reset pipe ret=%d\n", ret);
		return ret;
	}

	pipe->state = SKL_PIPE_RESET;

	ret = skl_ipc_delete_pipeline(&ctx->ipc, pipe->ppl_id);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to delete pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_INVALID;

	return ret;
}
1305
1306
1307
1308
1309
1310
1311int skl_run_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
1312{
1313 int ret;
1314
1315 dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);
1316
1317
1318 if (pipe->state < SKL_PIPE_CREATED)
1319 return 0;
1320
1321
1322 ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
1323 if (ret < 0) {
1324 dev_err(ctx->dev, "Failed to pause pipe\n");
1325 return ret;
1326 }
1327
1328 pipe->state = SKL_PIPE_PAUSED;
1329
1330 ret = skl_set_pipe_state(ctx, pipe, PPL_RUNNING);
1331 if (ret < 0) {
1332 dev_err(ctx->dev, "Failed to start pipe\n");
1333 return ret;
1334 }
1335
1336 pipe->state = SKL_PIPE_STARTED;
1337
1338 return 0;
1339}
1340
1341
1342
1343
1344
1345int skl_stop_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
1346{
1347 int ret;
1348
1349 dev_dbg(ctx->dev, "In %s pipe=%d\n", __func__, pipe->ppl_id);
1350
1351
1352 if (pipe->state < SKL_PIPE_PAUSED)
1353 return 0;
1354
1355 ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
1356 if (ret < 0) {
1357 dev_dbg(ctx->dev, "Failed to stop pipe\n");
1358 return ret;
1359 }
1360
1361 pipe->state = SKL_PIPE_PAUSED;
1362
1363 return 0;
1364}
1365
1366
1367
1368
1369
1370int skl_reset_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
1371{
1372 int ret;
1373
1374
1375 if (pipe->state < SKL_PIPE_PAUSED)
1376 return 0;
1377
1378 ret = skl_set_pipe_state(ctx, pipe, PPL_RESET);
1379 if (ret < 0) {
1380 dev_dbg(ctx->dev, "Failed to reset pipe ret=%d\n", ret);
1381 return ret;
1382 }
1383
1384 pipe->state = SKL_PIPE_RESET;
1385
1386 return 0;
1387}
1388
1389
1390int skl_set_module_params(struct skl_sst *ctx, u32 *params, int size,
1391 u32 param_id, struct skl_module_cfg *mcfg)
1392{
1393 struct skl_ipc_large_config_msg msg;
1394
1395 msg.module_id = mcfg->id.module_id;
1396 msg.instance_id = mcfg->id.pvt_id;
1397 msg.param_data_size = size;
1398 msg.large_param_id = param_id;
1399
1400 return skl_ipc_set_large_config(&ctx->ipc, &msg, params);
1401}
1402
1403int skl_get_module_params(struct skl_sst *ctx, u32 *params, int size,
1404 u32 param_id, struct skl_module_cfg *mcfg)
1405{
1406 struct skl_ipc_large_config_msg msg;
1407
1408 msg.module_id = mcfg->id.module_id;
1409 msg.instance_id = mcfg->id.pvt_id;
1410 msg.param_data_size = size;
1411 msg.large_param_id = param_id;
1412
1413 return skl_ipc_get_large_config(&ctx->ipc, &msg, params);
1414}
1415