1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#include <linux/slab.h>
21#include <linux/pci.h>
22#include <sound/core.h>
23#include <sound/pcm.h>
24#include "skl-sst-dsp.h"
25#include "skl-sst-ipc.h"
26#include "skl.h"
27#include "../common/sst-dsp.h"
28#include "../common/sst-dsp-priv.h"
29#include "skl-topology.h"
30#include "skl-tplg-interface.h"
31
32static int skl_alloc_dma_buf(struct device *dev,
33 struct snd_dma_buffer *dmab, size_t size)
34{
35 struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
36 struct hdac_bus *bus = ebus_to_hbus(ebus);
37
38 if (!bus)
39 return -ENODEV;
40
41 return bus->io_ops->dma_alloc_pages(bus, SNDRV_DMA_TYPE_DEV, size, dmab);
42}
43
44static int skl_free_dma_buf(struct device *dev, struct snd_dma_buffer *dmab)
45{
46 struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
47 struct hdac_bus *bus = ebus_to_hbus(ebus);
48
49 if (!bus)
50 return -ENODEV;
51
52 bus->io_ops->dma_free_pages(bus, dmab);
53
54 return 0;
55}
56
57#define NOTIFICATION_PARAM_ID 3
58#define NOTIFICATION_MASK 0xf
59
60
61static void skl_dsp_enable_notification(struct skl_sst *ctx, bool enable)
62{
63 struct notification_mask mask;
64 struct skl_ipc_large_config_msg msg = {0};
65
66 mask.notify = NOTIFICATION_MASK;
67 mask.enable = enable;
68
69 msg.large_param_id = NOTIFICATION_PARAM_ID;
70 msg.param_data_size = sizeof(mask);
71
72 skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)&mask);
73}
74
75static struct skl_dsp_loader_ops skl_get_loader_ops(void)
76{
77 struct skl_dsp_loader_ops loader_ops;
78
79 memset(&loader_ops, 0, sizeof(struct skl_dsp_loader_ops));
80
81 loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
82 loader_ops.free_dma_buf = skl_free_dma_buf;
83
84 return loader_ops;
85};
86
/*
 * Per-platform DSP callback table, keyed by PCI device id.
 * 0x9d70 is matched against skl->pci->device in skl_get_dsp_ops();
 * presumably the Skylake-LP audio controller id — TODO confirm.
 */
static const struct skl_dsp_ops dsp_ops[] = {
	{
		.id = 0x9d70,
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.cleanup = skl_sst_dsp_cleanup
	},
};
95
96static int skl_get_dsp_ops(int pci_id)
97{
98 int i;
99
100 for (i = 0; i < ARRAY_SIZE(dsp_ops); i++) {
101 if (dsp_ops[i].id == pci_id)
102 return i;
103 }
104
105 return -EINVAL;
106}
107
108int skl_init_dsp(struct skl *skl)
109{
110 void __iomem *mmio_base;
111 struct hdac_ext_bus *ebus = &skl->ebus;
112 struct hdac_bus *bus = ebus_to_hbus(ebus);
113 struct skl_dsp_loader_ops loader_ops;
114 int irq = bus->irq;
115 int ret, index;
116
117
118 snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
119 snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);
120
121
122 mmio_base = pci_ioremap_bar(skl->pci, 4);
123 if (mmio_base == NULL) {
124 dev_err(bus->dev, "ioremap error\n");
125 return -ENXIO;
126 }
127
128 index = skl_get_dsp_ops(skl->pci->device);
129 if (index < 0)
130 return -EINVAL;
131
132 loader_ops = dsp_ops[index].loader_ops();
133 ret = dsp_ops[index].init(bus->dev, mmio_base, irq,
134 skl->fw_name, loader_ops, &skl->skl_sst);
135
136 if (ret < 0)
137 return ret;
138
139 skl_dsp_enable_notification(skl->skl_sst, false);
140 dev_dbg(bus->dev, "dsp registration status=%d\n", ret);
141
142 return ret;
143}
144
/*
 * skl_free_dsp - Tear down the DSP instance created by skl_init_dsp().
 *
 * Disables the ppcap interrupt, runs the platform cleanup callback and
 * unmaps the ADSP MMIO region.  Returns 0 on success, -EIO when the
 * PCI id has no registered ops.
 */
int skl_free_dsp(struct skl *skl)
{
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_sst *ctx = skl->skl_sst;
	int index;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);

	index = skl_get_dsp_ops(skl->pci->device);
	if (index < 0)
		return -EIO;

	dsp_ops[index].cleanup(bus->dev, ctx);

	/*
	 * NOTE(review): ctx->dsp is dereferenced after cleanup(); this
	 * assumes the cleanup callback does not free ctx — confirm.
	 * addr.lpe is presumably the BAR mapped in skl_init_dsp().
	 */
	if (ctx->dsp->addr.lpe)
		iounmap(ctx->dsp->addr.lpe);

	return 0;
}
166
167int skl_suspend_dsp(struct skl *skl)
168{
169 struct skl_sst *ctx = skl->skl_sst;
170 int ret;
171
172
173 if (!skl->ebus.ppcap)
174 return 0;
175
176 ret = skl_dsp_sleep(ctx->dsp);
177 if (ret < 0)
178 return ret;
179
180
181 snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);
182 snd_hdac_ext_bus_ppcap_enable(&skl->ebus, false);
183
184 return 0;
185}
186
187int skl_resume_dsp(struct skl *skl)
188{
189 struct skl_sst *ctx = skl->skl_sst;
190 int ret;
191
192
193 if (!skl->ebus.ppcap)
194 return 0;
195
196
197 snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
198 snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);
199
200 ret = skl_dsp_wake(ctx->dsp);
201 if (ret < 0)
202 return ret;
203
204 skl_dsp_enable_notification(skl->skl_sst, false);
205 return ret;
206}
207
208enum skl_bitdepth skl_get_bit_depth(int params)
209{
210 switch (params) {
211 case 8:
212 return SKL_DEPTH_8BIT;
213
214 case 16:
215 return SKL_DEPTH_16BIT;
216
217 case 24:
218 return SKL_DEPTH_24BIT;
219
220 case 32:
221 return SKL_DEPTH_32BIT;
222
223 default:
224 return SKL_DEPTH_INVALID;
225
226 }
227}
228
229
230
231
232
233
234
235static void skl_set_base_module_format(struct skl_sst *ctx,
236 struct skl_module_cfg *mconfig,
237 struct skl_base_cfg *base_cfg)
238{
239 struct skl_module_fmt *format = &mconfig->in_fmt[0];
240
241 base_cfg->audio_fmt.number_of_channels = (u8)format->channels;
242
243 base_cfg->audio_fmt.s_freq = format->s_freq;
244 base_cfg->audio_fmt.bit_depth = format->bit_depth;
245 base_cfg->audio_fmt.valid_bit_depth = format->valid_bit_depth;
246 base_cfg->audio_fmt.ch_cfg = format->ch_cfg;
247
248 dev_dbg(ctx->dev, "bit_depth=%x valid_bd=%x ch_config=%x\n",
249 format->bit_depth, format->valid_bit_depth,
250 format->ch_cfg);
251
252 base_cfg->audio_fmt.channel_map = format->ch_map;
253
254 base_cfg->audio_fmt.interleaving = format->interleaving_style;
255
256 base_cfg->cps = mconfig->mcps;
257 base_cfg->ibs = mconfig->ibs;
258 base_cfg->obs = mconfig->obs;
259 base_cfg->is_pages = mconfig->mem_pages;
260}
261
262
263
264
265
266static void skl_copy_copier_caps(struct skl_module_cfg *mconfig,
267 struct skl_cpr_cfg *cpr_mconfig)
268{
269 if (mconfig->formats_config.caps_size == 0)
270 return;
271
272 memcpy(cpr_mconfig->gtw_cfg.config_data,
273 mconfig->formats_config.caps,
274 mconfig->formats_config.caps_size);
275
276 cpr_mconfig->gtw_cfg.config_length =
277 (mconfig->formats_config.caps_size) / 4;
278}
279
280#define SKL_NON_GATEWAY_CPR_NODE_ID 0xFFFFFFFF
281
282
283
284
/*
 * Build the firmware gateway "node id" for a copier from the module's
 * device type and connection direction.  The id packs a DMA class in
 * dma_type plus a device-specific virtual index in vindex; unknown
 * device types yield 0xFFFFFFFF (SKL_NON_GATEWAY_CPR_NODE_ID).
 */
static u32 skl_get_node_id(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	union skl_connector_node_id node_id = {0};
	union skl_ssp_dma_node ssp_node = {0};
	struct skl_pipe_params *params = mconfig->pipe->p_params;

	switch (mconfig->dev_type) {
	case SKL_DEVICE_BT:
		/* BT uses the I2S link classes; direction picks out vs in */
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		/*
		 * vindex = host_dma_id + (vbus_id << 3); encoding is
		 * presumably dictated by the FW ABI — TODO confirm.
		 */
		node_id.node.vindex = params->host_dma_id +
					 (mconfig->vbus_id << 3);
		break;

	case SKL_DEVICE_I2S:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		/* vindex combines TDM slot and SSP instance via ssp_node */
		ssp_node.dma_node.time_slot_index = mconfig->time_slot;
		ssp_node.dma_node.i2s_instance = mconfig->vbus_id;
		node_id.node.vindex = ssp_node.val;
		break;

	case SKL_DEVICE_DMIC:
		/* DMIC is capture-only: always the input class */
		node_id.node.dma_type = SKL_DMA_DMIC_LINK_INPUT_CLASS;
		node_id.node.vindex = mconfig->vbus_id +
					 (mconfig->time_slot);
		break;

	case SKL_DEVICE_HDALINK:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_LINK_OUTPUT_CLASS :
			SKL_DMA_HDA_LINK_INPUT_CLASS;
		node_id.node.vindex = params->link_dma_id;
		break;

	case SKL_DEVICE_HDAHOST:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_HOST_OUTPUT_CLASS :
			SKL_DMA_HDA_HOST_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id;
		break;

	default:
		/* not a gateway copier */
		node_id.val = 0xFFFFFFFF;
		break;
	}

	return node_id.val;
}
341
342static void skl_setup_cpr_gateway_cfg(struct skl_sst *ctx,
343 struct skl_module_cfg *mconfig,
344 struct skl_cpr_cfg *cpr_mconfig)
345{
346 cpr_mconfig->gtw_cfg.node_id = skl_get_node_id(ctx, mconfig);
347
348 if (cpr_mconfig->gtw_cfg.node_id == SKL_NON_GATEWAY_CPR_NODE_ID) {
349 cpr_mconfig->cpr_feature_mask = 0;
350 return;
351 }
352
353 if (SKL_CONN_SOURCE == mconfig->hw_conn_type)
354 cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * mconfig->obs;
355 else
356 cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * mconfig->ibs;
357
358 cpr_mconfig->cpr_feature_mask = 0;
359 cpr_mconfig->gtw_cfg.config_length = 0;
360
361 skl_copy_copier_caps(mconfig, cpr_mconfig);
362}
363
364#define DMA_CONTROL_ID 5
365
/*
 * skl_dsp_set_dma_control - Send the DMA control payload to the FW.
 *
 * The topology blob may carry DMA control bytes appended after the
 * basic i2s config; they are forwarded via a DMA_CONTROL_ID
 * large-config IPC.  Returns 0 on success or a negative errno.
 */
int skl_dsp_set_dma_control(struct skl_sst *ctx, struct skl_module_cfg *mconfig)
{
	struct skl_dma_control *dma_ctrl;
	struct skl_i2s_config_blob config_blob;	/* used only for sizeof */
	struct skl_ipc_large_config_msg msg = {0};
	int err = 0;

	/*
	 * If the blob is exactly the plain config size, no DMA control
	 * payload is present — nothing to send.
	 */
	if (mconfig->formats_config.caps_size == sizeof(config_blob))
		return 0;

	msg.large_param_id = DMA_CONTROL_ID;
	msg.param_data_size = sizeof(struct skl_dma_control) +
				mconfig->formats_config.caps_size;

	dma_ctrl = kzalloc(msg.param_data_size, GFP_KERNEL);
	if (dma_ctrl == NULL)
		return -ENOMEM;

	dma_ctrl->node_id = skl_get_node_id(ctx, mconfig);

	/* config_length is expressed in 32-bit words */
	dma_ctrl->config_length = sizeof(config_blob) / 4;

	memcpy(dma_ctrl->config_data, mconfig->formats_config.caps,
				mconfig->formats_config.caps_size);

	err = skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)dma_ctrl);

	kfree(dma_ctrl);

	return err;
}
403
404static void skl_setup_out_format(struct skl_sst *ctx,
405 struct skl_module_cfg *mconfig,
406 struct skl_audio_data_format *out_fmt)
407{
408 struct skl_module_fmt *format = &mconfig->out_fmt[0];
409
410 out_fmt->number_of_channels = (u8)format->channels;
411 out_fmt->s_freq = format->s_freq;
412 out_fmt->bit_depth = format->bit_depth;
413 out_fmt->valid_bit_depth = format->valid_bit_depth;
414 out_fmt->ch_cfg = format->ch_cfg;
415
416 out_fmt->channel_map = format->ch_map;
417 out_fmt->interleaving = format->interleaving_style;
418 out_fmt->sample_type = format->sample_type;
419
420 dev_dbg(ctx->dev, "copier out format chan=%d fre=%d bitdepth=%d\n",
421 out_fmt->number_of_channels, format->s_freq, format->bit_depth);
422}
423
424
425
426
427
428
429static void skl_set_src_format(struct skl_sst *ctx,
430 struct skl_module_cfg *mconfig,
431 struct skl_src_module_cfg *src_mconfig)
432{
433 struct skl_module_fmt *fmt = &mconfig->out_fmt[0];
434
435 skl_set_base_module_format(ctx, mconfig,
436 (struct skl_base_cfg *)src_mconfig);
437
438 src_mconfig->src_cfg = fmt->s_freq;
439}
440
441
442
443
444
445
446static void skl_set_updown_mixer_format(struct skl_sst *ctx,
447 struct skl_module_cfg *mconfig,
448 struct skl_up_down_mixer_cfg *mixer_mconfig)
449{
450 struct skl_module_fmt *fmt = &mconfig->out_fmt[0];
451 int i = 0;
452
453 skl_set_base_module_format(ctx, mconfig,
454 (struct skl_base_cfg *)mixer_mconfig);
455 mixer_mconfig->out_ch_cfg = fmt->ch_cfg;
456
457
458 mixer_mconfig->coeff_sel = 0x0;
459
460
461 for (i = 0; i < UP_DOWN_MIXER_MAX_COEFF; i++)
462 mixer_mconfig->coeff[i] = 0xDEADBEEF;
463}
464
465
466
467
468
469
470
471
472static void skl_set_copier_format(struct skl_sst *ctx,
473 struct skl_module_cfg *mconfig,
474 struct skl_cpr_cfg *cpr_mconfig)
475{
476 struct skl_audio_data_format *out_fmt = &cpr_mconfig->out_fmt;
477 struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)cpr_mconfig;
478
479 skl_set_base_module_format(ctx, mconfig, base_cfg);
480
481 skl_setup_out_format(ctx, mconfig, out_fmt);
482 skl_setup_cpr_gateway_cfg(ctx, mconfig, cpr_mconfig);
483}
484
485
486
487
488
489
490static void skl_set_algo_format(struct skl_sst *ctx,
491 struct skl_module_cfg *mconfig,
492 struct skl_algo_cfg *algo_mcfg)
493{
494 struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)algo_mcfg;
495
496 skl_set_base_module_format(ctx, mconfig, base_cfg);
497
498 if (mconfig->formats_config.caps_size == 0)
499 return;
500
501 memcpy(algo_mcfg->params,
502 mconfig->formats_config.caps,
503 mconfig->formats_config.caps_size);
504
505}
506
507
508
509
510
511
512
513
514static void skl_set_base_outfmt_format(struct skl_sst *ctx,
515 struct skl_module_cfg *mconfig,
516 struct skl_base_outfmt_cfg *base_outfmt_mcfg)
517{
518 struct skl_audio_data_format *out_fmt = &base_outfmt_mcfg->out_fmt;
519 struct skl_base_cfg *base_cfg =
520 (struct skl_base_cfg *)base_outfmt_mcfg;
521
522 skl_set_base_module_format(ctx, mconfig, base_cfg);
523 skl_setup_out_format(ctx, mconfig, out_fmt);
524}
525
526static u16 skl_get_module_param_size(struct skl_sst *ctx,
527 struct skl_module_cfg *mconfig)
528{
529 u16 param_size;
530
531 switch (mconfig->m_type) {
532 case SKL_MODULE_TYPE_COPIER:
533 param_size = sizeof(struct skl_cpr_cfg);
534 param_size += mconfig->formats_config.caps_size;
535 return param_size;
536
537 case SKL_MODULE_TYPE_SRCINT:
538 return sizeof(struct skl_src_module_cfg);
539
540 case SKL_MODULE_TYPE_UPDWMIX:
541 return sizeof(struct skl_up_down_mixer_cfg);
542
543 case SKL_MODULE_TYPE_ALGO:
544 param_size = sizeof(struct skl_base_cfg);
545 param_size += mconfig->formats_config.caps_size;
546 return param_size;
547
548 case SKL_MODULE_TYPE_BASE_OUTFMT:
549 return sizeof(struct skl_base_outfmt_cfg);
550
551 default:
552
553
554
555
556 return sizeof(struct skl_base_cfg);
557 }
558
559 return 0;
560}
561
562
563
564
565
566
567
568
569static int skl_set_module_format(struct skl_sst *ctx,
570 struct skl_module_cfg *module_config,
571 u16 *module_config_size,
572 void **param_data)
573{
574 u16 param_size;
575
576 param_size = skl_get_module_param_size(ctx, module_config);
577
578 *param_data = kzalloc(param_size, GFP_KERNEL);
579 if (NULL == *param_data)
580 return -ENOMEM;
581
582 *module_config_size = param_size;
583
584 switch (module_config->m_type) {
585 case SKL_MODULE_TYPE_COPIER:
586 skl_set_copier_format(ctx, module_config, *param_data);
587 break;
588
589 case SKL_MODULE_TYPE_SRCINT:
590 skl_set_src_format(ctx, module_config, *param_data);
591 break;
592
593 case SKL_MODULE_TYPE_UPDWMIX:
594 skl_set_updown_mixer_format(ctx, module_config, *param_data);
595 break;
596
597 case SKL_MODULE_TYPE_ALGO:
598 skl_set_algo_format(ctx, module_config, *param_data);
599 break;
600
601 case SKL_MODULE_TYPE_BASE_OUTFMT:
602 skl_set_base_outfmt_format(ctx, module_config, *param_data);
603 break;
604
605 default:
606 skl_set_base_module_format(ctx, module_config, *param_data);
607 break;
608
609 }
610
611 dev_dbg(ctx->dev, "Module type=%d config size: %d bytes\n",
612 module_config->id.module_id, param_size);
613 print_hex_dump(KERN_DEBUG, "Module params:", DUMP_PREFIX_OFFSET, 8, 4,
614 *param_data, param_size, false);
615 return 0;
616}
617
618static int skl_get_queue_index(struct skl_module_pin *mpin,
619 struct skl_module_inst_id id, int max)
620{
621 int i;
622
623 for (i = 0; i < max; i++) {
624 if (mpin[i].id.module_id == id.module_id &&
625 mpin[i].id.instance_id == id.instance_id)
626 return i;
627 }
628
629 return -EINVAL;
630}
631
632
633
634
635
636
/*
 * Allocate a pin queue toward target module @tgt_cfg.
 *
 * Dynamic pins: claim the first free, unbound pin and record the
 * target's module/instance id.  Static pins: the id is fixed at
 * topology time, so only a pin already carrying the target's id (and
 * still unbound) can be used.  Returns the pin index or -EINVAL.
 */
static int skl_alloc_queue(struct skl_module_pin *mpin,
			struct skl_module_cfg *tgt_cfg, int max)
{
	int i;
	struct skl_module_inst_id id = tgt_cfg->id;

	for (i = 0; i < max; i++) {
		if (mpin[i].is_dynamic) {
			if (!mpin[i].in_use &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				/* claim the pin for this target */
				mpin[i].in_use = true;
				mpin[i].id.module_id = id.module_id;
				mpin[i].id.instance_id = id.instance_id;
				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		} else {
			/* static pin: usable only if pre-assigned to @id */
			if (mpin[i].id.module_id == id.module_id &&
				mpin[i].id.instance_id == id.instance_id &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		}
	}

	return -EINVAL;
}
672
673static void skl_free_queue(struct skl_module_pin *mpin, int q_index)
674{
675 if (mpin[q_index].is_dynamic) {
676 mpin[q_index].in_use = false;
677 mpin[q_index].id.module_id = 0;
678 mpin[q_index].id.instance_id = 0;
679 }
680 mpin[q_index].pin_state = SKL_PIN_UNBIND;
681 mpin[q_index].tgt_mcfg = NULL;
682}
683
684
685
686static void skl_clear_module_state(struct skl_module_pin *mpin, int max,
687 struct skl_module_cfg *mcfg)
688{
689 int i;
690 bool found = false;
691
692 for (i = 0; i < max; i++) {
693 if (mpin[i].pin_state == SKL_PIN_UNBIND)
694 continue;
695 found = true;
696 break;
697 }
698
699 if (!found)
700 mcfg->m_state = SKL_MODULE_UNINIT;
701 return;
702}
703
704
705
706
707
708
709
710int skl_init_module(struct skl_sst *ctx,
711 struct skl_module_cfg *mconfig)
712{
713 u16 module_config_size = 0;
714 void *param_data = NULL;
715 int ret;
716 struct skl_ipc_init_instance_msg msg;
717
718 dev_dbg(ctx->dev, "%s: module_id = %d instance=%d\n", __func__,
719 mconfig->id.module_id, mconfig->id.instance_id);
720
721 if (mconfig->pipe->state != SKL_PIPE_CREATED) {
722 dev_err(ctx->dev, "Pipe not created state= %d pipe_id= %d\n",
723 mconfig->pipe->state, mconfig->pipe->ppl_id);
724 return -EIO;
725 }
726
727 ret = skl_set_module_format(ctx, mconfig,
728 &module_config_size, ¶m_data);
729 if (ret < 0) {
730 dev_err(ctx->dev, "Failed to set module format ret=%d\n", ret);
731 return ret;
732 }
733
734 msg.module_id = mconfig->id.module_id;
735 msg.instance_id = mconfig->id.instance_id;
736 msg.ppl_instance_id = mconfig->pipe->ppl_id;
737 msg.param_data_size = module_config_size;
738 msg.core_id = mconfig->core_id;
739
740 ret = skl_ipc_init_instance(&ctx->ipc, &msg, param_data);
741 if (ret < 0) {
742 dev_err(ctx->dev, "Failed to init instance ret=%d\n", ret);
743 kfree(param_data);
744 return ret;
745 }
746 mconfig->m_state = SKL_MODULE_INIT_DONE;
747
748 return ret;
749}
750
751static void skl_dump_bind_info(struct skl_sst *ctx, struct skl_module_cfg
752 *src_module, struct skl_module_cfg *dst_module)
753{
754 dev_dbg(ctx->dev, "%s: src module_id = %d src_instance=%d\n",
755 __func__, src_module->id.module_id, src_module->id.instance_id);
756 dev_dbg(ctx->dev, "%s: dst_module=%d dst_instacne=%d\n", __func__,
757 dst_module->id.module_id, dst_module->id.instance_id);
758
759 dev_dbg(ctx->dev, "src_module state = %d dst module state = %d\n",
760 src_module->m_state, dst_module->m_state);
761}
762
763
764
765
766
767
/*
 * skl_unbind_modules - Unbind two bound module instances in the FW.
 *
 * Looks up the queue each side uses for the other; silently succeeds
 * (returns 0) when the pair is not actually bound, so callers may
 * unbind unconditionally.  On successful IPC, the pins are released
 * and the source module state is cleared if it has no bound pins left.
 */
int skl_unbind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	struct skl_module_inst_id src_id = src_mcfg->id;
	struct skl_module_inst_id dst_id = dst_mcfg->id;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index, src_pin_state, dst_pin_state;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* get src queue index; no queue means nothing was bound */
	src_index = skl_get_queue_index(src_mcfg->m_out_pin, dst_id, out_max);
	if (src_index < 0)
		return 0;

	msg.src_queue = src_index;

	/* get dst queue index */
	dst_index = skl_get_queue_index(dst_mcfg->m_in_pin, src_id, in_max);
	if (dst_index < 0)
		return 0;

	msg.dst_queue = dst_index;

	src_pin_state = src_mcfg->m_out_pin[src_index].pin_state;
	dst_pin_state = dst_mcfg->m_in_pin[dst_index].pin_state;

	/* only a fully bound pair needs an unbind IPC */
	if (src_pin_state != SKL_PIN_BIND_DONE ||
		dst_pin_state != SKL_PIN_BIND_DONE)
		return 0;

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.instance_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.instance_id;
	msg.bind = false;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);
	if (!ret) {
		/* free both queues on success */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);

		/*
		 * Drop the source module back to UNINIT when no output
		 * pin remains bound.
		 */
		skl_clear_module_state(src_mcfg->m_out_pin, out_max, src_mcfg);
	}

	return ret;
}
824
825
826
827
828
829
830
831
/*
 * skl_bind_modules - Bind two initialized module instances in the FW.
 *
 * Returns 0 without binding when either module is not yet INIT_DONE.
 * Allocates an output queue on the source and an input queue on the
 * destination; both are released again if the IPC fails.
 */
int skl_bind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* both modules must be at least initialized in FW */
	if (src_mcfg->m_state < SKL_MODULE_INIT_DONE ||
		dst_mcfg->m_state < SKL_MODULE_INIT_DONE)
		return 0;

	src_index = skl_alloc_queue(src_mcfg->m_out_pin, dst_mcfg, out_max);
	if (src_index < 0)
		return -EINVAL;

	msg.src_queue = src_index;
	dst_index = skl_alloc_queue(dst_mcfg->m_in_pin, src_mcfg, in_max);
	if (dst_index < 0) {
		/* roll back the source queue allocation */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		return -EINVAL;
	}

	msg.dst_queue = dst_index;

	dev_dbg(ctx->dev, "src queue = %d dst queue =%d\n",
			 msg.src_queue, msg.dst_queue);

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.instance_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.instance_id;
	msg.bind = true;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);

	if (!ret) {
		src_mcfg->m_state = SKL_MODULE_BIND_DONE;
		src_mcfg->m_out_pin[src_index].pin_state = SKL_PIN_BIND_DONE;
		dst_mcfg->m_in_pin[dst_index].pin_state = SKL_PIN_BIND_DONE;
	} else {
		/* error case: free both the queues */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);
	}

	return ret;
}
884
885static int skl_set_pipe_state(struct skl_sst *ctx, struct skl_pipe *pipe,
886 enum skl_ipc_pipeline_state state)
887{
888 dev_dbg(ctx->dev, "%s: pipe_satate = %d\n", __func__, state);
889
890 return skl_ipc_set_pipeline_state(&ctx->ipc, pipe->ppl_id, state);
891}
892
893
894
895
896
897
898
899int skl_create_pipeline(struct skl_sst *ctx, struct skl_pipe *pipe)
900{
901 int ret;
902
903 dev_dbg(ctx->dev, "%s: pipe_id = %d\n", __func__, pipe->ppl_id);
904
905 ret = skl_ipc_create_pipeline(&ctx->ipc, pipe->memory_pages,
906 pipe->pipe_priority, pipe->ppl_id);
907 if (ret < 0) {
908 dev_err(ctx->dev, "Failed to create pipeline\n");
909 return ret;
910 }
911
912 pipe->state = SKL_PIPE_CREATED;
913
914 return 0;
915}
916
917
918
919
920
921
922
/*
 * skl_delete_pipe - Pause a running pipe, or delete an idle one, in FW.
 *
 * A pipe beyond STARTED is only paused here (state -> PAUSED); the
 * actual delete happens on a subsequent call once it is no longer
 * running.  NOTE(review): the "> SKL_PIPE_STARTED" guard and two-phase
 * flow look intentional but depend on the state-enum ordering —
 * confirm against the skl_pipe_state definition before changing.
 */
int skl_delete_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* pipe still active in FW: pause it rather than delete */
	if (pipe->state > SKL_PIPE_STARTED) {
		ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
		if (ret < 0) {
			dev_err(ctx->dev, "Failed to stop pipeline\n");
			return ret;
		}

		pipe->state = SKL_PIPE_PAUSED;
	} else {
		/* pipe was never created in FW: nothing to delete */
		if (pipe->state < SKL_PIPE_CREATED)
			return 0;

		ret = skl_ipc_delete_pipeline(&ctx->ipc, pipe->ppl_id);
		if (ret < 0)
			dev_err(ctx->dev, "Failed to delete pipeline\n");

		pipe->state = SKL_PIPE_INVALID;
	}

	return ret;
}
952
953
954
955
956
957
958int skl_run_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
959{
960 int ret;
961
962 dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);
963
964
965 if (pipe->state < SKL_PIPE_CREATED)
966 return 0;
967
968
969 ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
970 if (ret < 0) {
971 dev_err(ctx->dev, "Failed to pause pipe\n");
972 return ret;
973 }
974
975 pipe->state = SKL_PIPE_PAUSED;
976
977 ret = skl_set_pipe_state(ctx, pipe, PPL_RUNNING);
978 if (ret < 0) {
979 dev_err(ctx->dev, "Failed to start pipe\n");
980 return ret;
981 }
982
983 pipe->state = SKL_PIPE_STARTED;
984
985 return 0;
986}
987
988
989
990
991
992int skl_stop_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
993{
994 int ret;
995
996 dev_dbg(ctx->dev, "In %s pipe=%d\n", __func__, pipe->ppl_id);
997
998
999 if (pipe->state < SKL_PIPE_PAUSED)
1000 return 0;
1001
1002 ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
1003 if (ret < 0) {
1004 dev_dbg(ctx->dev, "Failed to stop pipe\n");
1005 return ret;
1006 }
1007
1008 pipe->state = SKL_PIPE_CREATED;
1009
1010 return 0;
1011}
1012
1013
1014int skl_set_module_params(struct skl_sst *ctx, u32 *params, int size,
1015 u32 param_id, struct skl_module_cfg *mcfg)
1016{
1017 struct skl_ipc_large_config_msg msg;
1018
1019 msg.module_id = mcfg->id.module_id;
1020 msg.instance_id = mcfg->id.instance_id;
1021 msg.param_data_size = size;
1022 msg.large_param_id = param_id;
1023
1024 return skl_ipc_set_large_config(&ctx->ipc, &msg, params);
1025}
1026
1027int skl_get_module_params(struct skl_sst *ctx, u32 *params, int size,
1028 u32 param_id, struct skl_module_cfg *mcfg)
1029{
1030 struct skl_ipc_large_config_msg msg;
1031
1032 msg.module_id = mcfg->id.module_id;
1033 msg.instance_id = mcfg->id.instance_id;
1034 msg.param_data_size = size;
1035 msg.large_param_id = param_id;
1036
1037 return skl_ipc_get_large_config(&ctx->ipc, &msg, params);
1038}
1039