/*
 * HDA DSP interface for firmware loading, pipeline and module
 * configuration (Intel Skylake audio driver).
 */
#include <linux/slab.h>
#include <linux/pci.h>
#include <sound/core.h>
#include <sound/pcm.h>
#include "skl-sst-dsp.h"
#include "skl-sst-ipc.h"
#include "skl.h"
#include "../common/sst-dsp.h"
#include "../common/sst-dsp-priv.h"
#include "skl-topology.h"
#include "skl-tplg-interface.h"

static int skl_alloc_dma_buf(struct device *dev,
		struct snd_dma_buffer *dmab, size_t size)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	return bus->io_ops->dma_alloc_pages(bus, SNDRV_DMA_TYPE_DEV, size, dmab);
}

static int skl_free_dma_buf(struct device *dev, struct snd_dma_buffer *dmab)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	bus->io_ops->dma_free_pages(bus, dmab);

	return 0;
}

#define NOTIFICATION_PARAM_ID 3
#define NOTIFICATION_MASK 0xf

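/* Enable or disable notification messages from the DSP firmware */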
static void skl_dsp_enable_notification(struct skl_sst *ctx, bool enable)
{
	struct notification_mask mask;
	struct skl_ipc_large_config_msg msg = {0};

	mask.notify = NOTIFICATION_MASK;
	mask.enable = enable;

	msg.large_param_id = NOTIFICATION_PARAM_ID;
	msg.param_data_size = sizeof(mask);

	skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)&mask);
}

static int skl_dsp_setup_spib(struct device *dev, unsigned int size,
				int stream_tag, int enable)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct hdac_stream *stream = snd_hdac_get_stream(bus,
			SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	struct hdac_ext_stream *estream;

	if (!stream)
		return -EINVAL;

	estream = stream_to_hdac_ext_stream(stream);

	/* enable/disable SPIB (software position in buffer) for this stream */
	snd_hdac_ext_stream_spbcap_enable(ebus, enable, stream->index);

	/* program the SPIB register with the buffer size */
	snd_hdac_ext_stream_set_spib(ebus, estream, size);

	return 0;
}

static int skl_dsp_prepare(struct device *dev, unsigned int format,
			unsigned int size, struct snd_dma_buffer *dmab)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct hdac_ext_stream *estream;
	struct hdac_stream *stream;
	struct snd_pcm_substream substream;
	int ret;

	if (!bus)
		return -ENODEV;

	memset(&substream, 0, sizeof(substream));
	substream.stream = SNDRV_PCM_STREAM_PLAYBACK;

	estream = snd_hdac_ext_stream_assign(ebus, &substream,
					HDAC_EXT_STREAM_TYPE_HOST);
	if (!estream)
		return -ENODEV;

	stream = hdac_stream(estream);

	/* set up the DSP DMA buffer on the assigned host stream */
	ret = snd_hdac_dsp_prepare(stream, format, size, dmab);
	if (ret < 0)
		return ret;

	skl_dsp_setup_spib(dev, size, stream->stream_tag, true);

	return stream->stream_tag;
}

static int skl_dsp_trigger(struct device *dev, bool start, int stream_tag)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_stream *stream;
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	stream = snd_hdac_get_stream(bus,
			SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	if (!stream)
		return -EINVAL;

	snd_hdac_dsp_trigger(stream, start);

	return 0;
}

static int skl_dsp_cleanup(struct device *dev,
		struct snd_dma_buffer *dmab, int stream_tag)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_stream *stream;
	struct hdac_ext_stream *estream;
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	stream = snd_hdac_get_stream(bus,
			SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	if (!stream)
		return -EINVAL;

	estream = stream_to_hdac_ext_stream(stream);
	skl_dsp_setup_spib(dev, 0, stream_tag, false);
	snd_hdac_ext_stream_release(estream, HDAC_EXT_STREAM_TYPE_HOST);

	snd_hdac_dsp_cleanup(stream, dmab);

	return 0;
}

static struct skl_dsp_loader_ops skl_get_loader_ops(void)
{
	struct skl_dsp_loader_ops loader_ops;

	memset(&loader_ops, 0, sizeof(struct skl_dsp_loader_ops));

	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
	loader_ops.free_dma_buf = skl_free_dma_buf;

	return loader_ops;
}

static struct skl_dsp_loader_ops bxt_get_loader_ops(void)
{
	struct skl_dsp_loader_ops loader_ops;

	memset(&loader_ops, 0, sizeof(loader_ops));

	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
	loader_ops.free_dma_buf = skl_free_dma_buf;
	loader_ops.prepare = skl_dsp_prepare;
	loader_ops.trigger = skl_dsp_trigger;
	loader_ops.cleanup = skl_dsp_cleanup;

	return loader_ops;
}

static const struct skl_dsp_ops dsp_ops[] = {
	{
		.id = 0x9d70,		/* Sunrise Point-LP (Skylake) */
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x9d71,		/* Kabylake-LP */
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x5a98,		/* Broxton-P (Apollo Lake) */
		.loader_ops = bxt_get_loader_ops,
		.init = bxt_sst_dsp_init,
		.init_fw = bxt_sst_init_fw,
		.cleanup = bxt_sst_dsp_cleanup
	},
};

const struct skl_dsp_ops *skl_get_dsp_ops(int pci_id)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(dsp_ops); i++) {
		if (dsp_ops[i].id == pci_id)
			return &dsp_ops[i];
	}

	return NULL;
}

int skl_init_dsp(struct skl *skl)
{
	void __iomem *mmio_base;
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_dsp_loader_ops loader_ops;
	int irq = bus->irq;
	const struct skl_dsp_ops *ops;
	int ret;

	/* enable processing pipe capability (ppcap) and its interrupt */
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);

	/* map the ADSP MMIO BAR */
	mmio_base = pci_ioremap_bar(skl->pci, 4);
	if (mmio_base == NULL) {
		dev_err(bus->dev, "ioremap error\n");
		return -ENXIO;
	}

	ops = skl_get_dsp_ops(skl->pci->device);
	if (!ops) {
		/* no matching platform ops, release the mapping */
		iounmap(mmio_base);
		return -EIO;
	}

	loader_ops = ops->loader_ops();
	ret = ops->init(bus->dev, mmio_base, irq,
			skl->fw_name, loader_ops,
			&skl->skl_sst);
	if (ret < 0)
		return ret;

	dev_dbg(bus->dev, "dsp registration status=%d\n", ret);

	return ret;
}

int skl_free_dsp(struct skl *skl)
{
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_sst *ctx = skl->skl_sst;
	const struct skl_dsp_ops *ops;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);

	ops = skl_get_dsp_ops(skl->pci->device);
	if (!ops)
		return -EIO;

	ops->cleanup(bus->dev, ctx);

	if (ctx->dsp->addr.lpe)
		iounmap(ctx->dsp->addr.lpe);

	return 0;
}

int skl_suspend_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	int ret;

	/* if ppcap is not supported, nothing to do */
	if (!skl->ebus.bus.ppcap)
		return 0;

	ret = skl_dsp_sleep(ctx->dsp);
	if (ret < 0)
		return ret;

	/* disable ppcap interrupt and processing pipe capability */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, false);

	return 0;
}

int skl_resume_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	int ret;

	/* if ppcap is not supported, nothing to do */
	if (!skl->ebus.bus.ppcap)
		return 0;

	/* enable ppcap and its interrupt */
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);

	/* do not wake the DSP before the firmware's first boot has completed */
	if (skl->skl_sst->is_first_boot)
		return 0;

	ret = skl_dsp_wake(ctx->dsp);
	if (ret < 0)
		return ret;

	skl_dsp_enable_notification(skl->skl_sst, false);
	return ret;
}

enum skl_bitdepth skl_get_bit_depth(int params)
{
	switch (params) {
	case 8:
		return SKL_DEPTH_8BIT;

	case 16:
		return SKL_DEPTH_16BIT;

	case 24:
		return SKL_DEPTH_24BIT;

	case 32:
		return SKL_DEPTH_32BIT;

	default:
		return SKL_DEPTH_INVALID;
	}
}

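/*
 * Every module in the DSP expects a base module configuration: the PCM
 * format (channels, rate, bit depth, channel map) taken from the input
 * format, plus the resource values (cycles, buffer sizes, memory pages)
 * read from the topology. It is sent as part of the INIT_INSTANCE IPC
 * payload.
 */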
static void skl_set_base_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_cfg *base_cfg)
{
	struct skl_module_fmt *format = &mconfig->in_fmt[0];

	base_cfg->audio_fmt.number_of_channels = (u8)format->channels;

	base_cfg->audio_fmt.s_freq = format->s_freq;
	base_cfg->audio_fmt.bit_depth = format->bit_depth;
	base_cfg->audio_fmt.valid_bit_depth = format->valid_bit_depth;
	base_cfg->audio_fmt.ch_cfg = format->ch_cfg;

	dev_dbg(ctx->dev, "bit_depth=%x valid_bd=%x ch_config=%x\n",
			format->bit_depth, format->valid_bit_depth,
			format->ch_cfg);

	base_cfg->audio_fmt.channel_map = format->ch_map;
	base_cfg->audio_fmt.interleaving = format->interleaving_style;

	base_cfg->cps = mconfig->mcps;
	base_cfg->ibs = mconfig->ibs;
	base_cfg->obs = mconfig->obs;
	base_cfg->is_pages = mconfig->mem_pages;
}

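/*
 * Copy the copier capabilities (gateway configuration blob) from the
 * topology data into the copier module config and set its length,
 * expressed in DWORDs.
 */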
static void skl_copy_copier_caps(struct skl_module_cfg *mconfig,
				struct skl_cpr_cfg *cpr_mconfig)
{
	if (mconfig->formats_config.caps_size == 0)
		return;

	memcpy(cpr_mconfig->gtw_cfg.config_data,
			mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);

	cpr_mconfig->gtw_cfg.config_length =
			mconfig->formats_config.caps_size / 4;
}

#define SKL_NON_GATEWAY_CPR_NODE_ID 0xFFFFFFFF

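/*
 * Calculate the gateway node id for a copier module: the DMA type is
 * derived from the device type and connection direction, the virtual
 * index from the DMA/SSP/DMIC instance parameters.
 */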
static u32 skl_get_node_id(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	union skl_connector_node_id node_id = {0};
	union skl_ssp_dma_node ssp_node = {0};
	struct skl_pipe_params *params = mconfig->pipe->p_params;

	switch (mconfig->dev_type) {
	case SKL_DEVICE_BT:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id +
					(mconfig->vbus_id << 3);
		break;

	case SKL_DEVICE_I2S:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		ssp_node.dma_node.time_slot_index = mconfig->time_slot;
		ssp_node.dma_node.i2s_instance = mconfig->vbus_id;
		node_id.node.vindex = ssp_node.val;
		break;

	case SKL_DEVICE_DMIC:
		node_id.node.dma_type = SKL_DMA_DMIC_LINK_INPUT_CLASS;
		node_id.node.vindex = mconfig->vbus_id +
					mconfig->time_slot;
		break;

	case SKL_DEVICE_HDALINK:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_LINK_OUTPUT_CLASS :
			SKL_DMA_HDA_LINK_INPUT_CLASS;
		node_id.node.vindex = params->link_dma_id;
		break;

	case SKL_DEVICE_HDAHOST:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_HOST_OUTPUT_CLASS :
			SKL_DMA_HDA_HOST_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id;
		break;

	default:
		node_id.val = SKL_NON_GATEWAY_CPR_NODE_ID;
		break;
	}

	return node_id.val;
}

static void skl_setup_cpr_gateway_cfg(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	cpr_mconfig->gtw_cfg.node_id = skl_get_node_id(ctx, mconfig);

	if (cpr_mconfig->gtw_cfg.node_id == SKL_NON_GATEWAY_CPR_NODE_ID) {
		cpr_mconfig->cpr_feature_mask = 0;
		return;
	}

	if (SKL_CONN_SOURCE == mconfig->hw_conn_type)
		cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * mconfig->obs;
	else
		cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * mconfig->ibs;

	cpr_mconfig->cpr_feature_mask = 0;
	cpr_mconfig->gtw_cfg.config_length = 0;

	skl_copy_copier_caps(mconfig, cpr_mconfig);
}

#define DMA_CONTROL_ID 5

int skl_dsp_set_dma_control(struct skl_sst *ctx, struct skl_module_cfg *mconfig)
{
	struct skl_dma_control *dma_ctrl;
	struct skl_i2s_config_blob config_blob;
	struct skl_ipc_large_config_msg msg = {0};
	int err = 0;

	/*
	 * if the caps blob is only as large as the bare I2S config, there
	 * is no DMA control data to send
	 */
	if (mconfig->formats_config.caps_size == sizeof(config_blob))
		return 0;

	msg.large_param_id = DMA_CONTROL_ID;
	msg.param_data_size = sizeof(struct skl_dma_control) +
				mconfig->formats_config.caps_size;

	dma_ctrl = kzalloc(msg.param_data_size, GFP_KERNEL);
	if (!dma_ctrl)
		return -ENOMEM;

	dma_ctrl->node_id = skl_get_node_id(ctx, mconfig);

	/* config length is expressed in DWORDs */
	dma_ctrl->config_length = sizeof(config_blob) / 4;

	memcpy(dma_ctrl->config_data, mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);

	err = skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)dma_ctrl);

	kfree(dma_ctrl);

	return err;
}

static void skl_setup_out_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_audio_data_format *out_fmt)
{
	struct skl_module_fmt *format = &mconfig->out_fmt[0];

	out_fmt->number_of_channels = (u8)format->channels;
	out_fmt->s_freq = format->s_freq;
	out_fmt->bit_depth = format->bit_depth;
	out_fmt->valid_bit_depth = format->valid_bit_depth;
	out_fmt->ch_cfg = format->ch_cfg;

	out_fmt->channel_map = format->ch_map;
	out_fmt->interleaving = format->interleaving_style;
	out_fmt->sample_type = format->sample_type;

	dev_dbg(ctx->dev, "copier out format chan=%d freq=%d bitdepth=%d\n",
		out_fmt->number_of_channels, format->s_freq, format->bit_depth);
}

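/*
 * The SRC module performs sample rate conversion: it takes the base
 * module configuration plus the target output frequency.
 */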
static void skl_set_src_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_src_module_cfg *src_mconfig)
{
	struct skl_module_fmt *fmt = &mconfig->out_fmt[0];

	skl_set_base_module_format(ctx, mconfig,
		(struct skl_base_cfg *)src_mconfig);

	src_mconfig->src_cfg = fmt->s_freq;
}

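/*
 * The up/down mixer module performs channel conversion: it takes the
 * base module configuration, the output channel configuration and a
 * coefficient selection (firmware defaults are used here).
 */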
static void skl_set_updown_mixer_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_up_down_mixer_cfg *mixer_mconfig)
{
	struct skl_module_fmt *fmt = &mconfig->out_fmt[0];
	int i = 0;

	skl_set_base_module_format(ctx, mconfig,
		(struct skl_base_cfg *)mixer_mconfig);
	mixer_mconfig->out_ch_cfg = fmt->ch_cfg;

	/* select the firmware default coefficients */
	mixer_mconfig->coeff_sel = 0x0;

	/* user coefficients are don't-care when firmware defaults are used */
	for (i = 0; i < UP_DOWN_MIXER_MAX_COEFF; i++)
		mixer_mconfig->coeff[i] = 0xDEADBEEF;
}

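/*
 * The copier is a DSP-internal module that moves data between a gateway
 * (HDA host DMA, HDA link, SSP, DMIC) and the pipeline. Its configuration
 * is the base module format plus the output format and gateway settings.
 */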
static void skl_set_copier_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	struct skl_audio_data_format *out_fmt = &cpr_mconfig->out_fmt;
	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)cpr_mconfig;

	skl_set_base_module_format(ctx, mconfig, base_cfg);

	skl_setup_out_format(ctx, mconfig, out_fmt);
	skl_setup_cpr_gateway_cfg(ctx, mconfig, cpr_mconfig);
}

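/*
 * Algo (processing) modules take the base module configuration followed
 * by an opaque parameter blob from the topology.
 */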
static void skl_set_algo_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_algo_cfg *algo_mcfg)
{
	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)algo_mcfg;

	skl_set_base_module_format(ctx, mconfig, base_cfg);

	if (mconfig->formats_config.caps_size == 0)
		return;

	memcpy(algo_mcfg->params,
			mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);
}

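/*
 * Base-with-output-format modules (e.g. the KPB module) take the base
 * module configuration plus an explicit output format.
 */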
static void skl_set_base_outfmt_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_outfmt_cfg *base_outfmt_mcfg)
{
	struct skl_audio_data_format *out_fmt = &base_outfmt_mcfg->out_fmt;
	struct skl_base_cfg *base_cfg =
			(struct skl_base_cfg *)base_outfmt_mcfg;

	skl_set_base_module_format(ctx, mconfig, base_cfg);
	skl_setup_out_format(ctx, mconfig, out_fmt);
}

static u16 skl_get_module_param_size(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 param_size;

	switch (mconfig->m_type) {
	case SKL_MODULE_TYPE_COPIER:
		param_size = sizeof(struct skl_cpr_cfg);
		param_size += mconfig->formats_config.caps_size;
		return param_size;

	case SKL_MODULE_TYPE_SRCINT:
		return sizeof(struct skl_src_module_cfg);

	case SKL_MODULE_TYPE_UPDWMIX:
		return sizeof(struct skl_up_down_mixer_cfg);

	case SKL_MODULE_TYPE_ALGO:
		param_size = sizeof(struct skl_base_cfg);
		param_size += mconfig->formats_config.caps_size;
		return param_size;

	case SKL_MODULE_TYPE_BASE_OUTFMT:
	case SKL_MODULE_TYPE_KPB:
		return sizeof(struct skl_base_outfmt_cfg);

	default:
		/*
		 * modules with no type-specific config only need the base
		 * module format
		 */
		return sizeof(struct skl_base_cfg);
	}

	return 0;
}

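/*
 * The DSP firmware supports several module types (copier, SRC, up/down
 * mixer, algo, ...). Allocate and fill the type-specific configuration
 * that is sent as the INIT_INSTANCE IPC payload; a generic module only
 * needs the base module format.
 */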
static int skl_set_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *module_config,
			u16 *module_config_size,
			void **param_data)
{
	u16 param_size;

	param_size = skl_get_module_param_size(ctx, module_config);

	*param_data = kzalloc(param_size, GFP_KERNEL);
	if (!*param_data)
		return -ENOMEM;

	*module_config_size = param_size;

	switch (module_config->m_type) {
	case SKL_MODULE_TYPE_COPIER:
		skl_set_copier_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_SRCINT:
		skl_set_src_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_UPDWMIX:
		skl_set_updown_mixer_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_ALGO:
		skl_set_algo_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_BASE_OUTFMT:
	case SKL_MODULE_TYPE_KPB:
		skl_set_base_outfmt_format(ctx, module_config, *param_data);
		break;

	default:
		skl_set_base_module_format(ctx, module_config, *param_data);
		break;
	}

	dev_dbg(ctx->dev, "Module id=%d config size: %d bytes\n",
			module_config->id.module_id, param_size);
	print_hex_dump_debug("Module params:", DUMP_PREFIX_OFFSET, 8, 4,
			*param_data, param_size, false);
	return 0;
}

static int skl_get_queue_index(struct skl_module_pin *mpin,
			struct skl_module_inst_id id, int max)
{
	int i;

	for (i = 0; i < max; i++) {
		if (mpin[i].id.module_id == id.module_id &&
			mpin[i].id.instance_id == id.instance_id)
			return i;
	}

	return -EINVAL;
}

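/*
 * Allocate a pin/queue on a module for a connection to tgt_cfg.
 * Dynamic pins are allocated first-free; static pins are fixed by the
 * topology, so the matching module/instance entry is used as-is.
 */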
static int skl_alloc_queue(struct skl_module_pin *mpin,
			struct skl_module_cfg *tgt_cfg, int max)
{
	int i;
	struct skl_module_inst_id id = tgt_cfg->id;

	/*
	 * dynamic pins: take the first free, unbound pin.
	 * static pins: the pin index is fixed by topology, so find the pin
	 * already carrying this module and instance id.
	 */
	for (i = 0; i < max; i++) {
		if (mpin[i].is_dynamic) {
			if (!mpin[i].in_use &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].in_use = true;
				mpin[i].id.module_id = id.module_id;
				mpin[i].id.instance_id = id.instance_id;
				mpin[i].id.pvt_id = id.pvt_id;
				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		} else {
			if (mpin[i].id.module_id == id.module_id &&
				mpin[i].id.instance_id == id.instance_id &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		}
	}

	return -EINVAL;
}

static void skl_free_queue(struct skl_module_pin *mpin, int q_index)
{
	if (mpin[q_index].is_dynamic) {
		mpin[q_index].in_use = false;
		mpin[q_index].id.module_id = 0;
		mpin[q_index].id.instance_id = 0;
		mpin[q_index].id.pvt_id = 0;
	}
	mpin[q_index].pin_state = SKL_PIN_UNBIND;
	mpin[q_index].tgt_mcfg = NULL;
}

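/* Mark the module uninitialized once none of its pins is bound any more */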
static void skl_clear_module_state(struct skl_module_pin *mpin, int max,
				struct skl_module_cfg *mcfg)
{
	int i;
	bool found = false;

	for (i = 0; i < max; i++) {
		if (mpin[i].pin_state == SKL_PIN_UNBIND)
			continue;
		found = true;
		break;
	}

	if (!found)
		mcfg->m_state = SKL_MODULE_UNINIT;
}

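/*
 * Instantiate a module in the DSP: build the module-type specific
 * configuration and send it with an INIT_INSTANCE IPC message. The
 * pipeline the module belongs to must already have been created.
 */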
int skl_init_module(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 module_config_size = 0;
	void *param_data = NULL;
	int ret;
	struct skl_ipc_init_instance_msg msg;

	dev_dbg(ctx->dev, "%s: module_id = %d instance=%d\n", __func__,
		mconfig->id.module_id, mconfig->id.pvt_id);

	if (mconfig->pipe->state != SKL_PIPE_CREATED) {
		dev_err(ctx->dev, "Pipe not created state= %d pipe_id= %d\n",
				mconfig->pipe->state, mconfig->pipe->ppl_id);
		return -EIO;
	}

	ret = skl_set_module_format(ctx, mconfig,
			&module_config_size, &param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to set module format ret=%d\n", ret);
		return ret;
	}

	msg.module_id = mconfig->id.module_id;
	msg.instance_id = mconfig->id.pvt_id;
	msg.ppl_instance_id = mconfig->pipe->ppl_id;
	msg.param_data_size = module_config_size;
	msg.core_id = mconfig->core_id;
	msg.domain = mconfig->domain;

	ret = skl_ipc_init_instance(&ctx->ipc, &msg, param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to init instance ret=%d\n", ret);
		kfree(param_data);
		return ret;
	}
	mconfig->m_state = SKL_MODULE_INIT_DONE;
	kfree(param_data);
	return ret;
}

static void skl_dump_bind_info(struct skl_sst *ctx, struct skl_module_cfg
				*src_module, struct skl_module_cfg *dst_module)
{
	dev_dbg(ctx->dev, "%s: src module_id = %d src_instance=%d\n",
		__func__, src_module->id.module_id, src_module->id.pvt_id);
	dev_dbg(ctx->dev, "%s: dst_module=%d dst_instance=%d\n", __func__,
		dst_module->id.module_id, dst_module->id.pvt_id);

	dev_dbg(ctx->dev, "src_module state = %d dst module state = %d\n",
		src_module->m_state, dst_module->m_state);
}

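/*
 * Unbind two modules that were previously bound: look up the connected
 * pins on both sides and, if they are in the bound state, send a
 * bind_unbind IPC with bind = false and release the queues.
 */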
int skl_unbind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	struct skl_module_inst_id src_id = src_mcfg->id;
	struct skl_module_inst_id dst_id = dst_mcfg->id;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index, src_pin_state, dst_pin_state;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* get src queue index */
	src_index = skl_get_queue_index(src_mcfg->m_out_pin, dst_id, out_max);
	if (src_index < 0)
		return 0;

	msg.src_queue = src_index;

	/* get dst queue index */
	dst_index = skl_get_queue_index(dst_mcfg->m_in_pin, src_id, in_max);
	if (dst_index < 0)
		return 0;

	msg.dst_queue = dst_index;

	src_pin_state = src_mcfg->m_out_pin[src_index].pin_state;
	dst_pin_state = dst_mcfg->m_in_pin[dst_index].pin_state;

	/* nothing to do if the pins were never bound */
	if (src_pin_state != SKL_PIN_BIND_DONE ||
		dst_pin_state != SKL_PIN_BIND_DONE)
		return 0;

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = false;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);
	if (!ret) {
		/* free the queues only if the unbind succeeded */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);

		/*
		 * if no pin is left bound on the source, the module can be
		 * marked uninitialized
		 */
		skl_clear_module_state(src_mcfg->m_out_pin, out_max, src_mcfg);
	}

	return ret;
}

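/*
 * Bind two instantiated modules: allocate an output pin on the source
 * and an input pin on the destination, then send a bind_unbind IPC with
 * bind = true.
 */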
int skl_bind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	if (src_mcfg->m_state < SKL_MODULE_INIT_DONE ||
		dst_mcfg->m_state < SKL_MODULE_INIT_DONE)
		return 0;

	src_index = skl_alloc_queue(src_mcfg->m_out_pin, dst_mcfg, out_max);
	if (src_index < 0)
		return -EINVAL;

	msg.src_queue = src_index;
	dst_index = skl_alloc_queue(dst_mcfg->m_in_pin, src_mcfg, in_max);
	if (dst_index < 0) {
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		return -EINVAL;
	}

	msg.dst_queue = dst_index;

	dev_dbg(ctx->dev, "src queue = %d dst queue = %d\n",
			msg.src_queue, msg.dst_queue);

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = true;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);
	if (!ret) {
		src_mcfg->m_state = SKL_MODULE_BIND_DONE;
		src_mcfg->m_out_pin[src_index].pin_state = SKL_PIN_BIND_DONE;
		dst_mcfg->m_in_pin[dst_index].pin_state = SKL_PIN_BIND_DONE;
	} else {
		/* error case: if the IPC fails, release the allocated queues */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);
	}

	return ret;
}

static int skl_set_pipe_state(struct skl_sst *ctx, struct skl_pipe *pipe,
			enum skl_ipc_pipeline_state state)
{
	dev_dbg(ctx->dev, "%s: pipe_state = %d\n", __func__, state);

	return skl_ipc_set_pipeline_state(&ctx->ipc, pipe->ppl_id, state);
}

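/*
 * A pipeline is a collection of modules; it has to be created in the
 * firmware (with its memory pages and priority) before any module can
 * be instantiated in it.
 */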
int skl_create_pipeline(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe_id = %d\n", __func__, pipe->ppl_id);

	ret = skl_ipc_create_pipeline(&ctx->ipc, pipe->memory_pages,
				pipe->pipe_priority, pipe->ppl_id);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to create pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_CREATED;

	return 0;
}

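/*
 * Delete a pipeline in the firmware. A started pipe is paused first;
 * a pipe that was never created in the firmware is skipped.
 */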
int skl_delete_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If the pipe is started, stop (pause) it in FW first */
	if (pipe->state > SKL_PIPE_STARTED) {
		ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
		if (ret < 0) {
			dev_err(ctx->dev, "Failed to stop pipeline\n");
			return ret;
		}

		pipe->state = SKL_PIPE_PAUSED;
	}

	/* If the pipe was not created in FW, do not try to delete it */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	ret = skl_ipc_delete_pipeline(&ctx->ipc, pipe->ppl_id);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to delete pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_INVALID;

	return ret;
}

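/*
 * Run a created pipeline: the firmware requires a created pipe to be
 * moved to PAUSED before it can be set to RUNNING.
 */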
int skl_run_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If the pipe was not created in FW, do not try to run it */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	/* The pipe has to be paused before it can be started */
	ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to pause pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_PAUSED;

	ret = skl_set_pipe_state(ctx, pipe, PPL_RUNNING);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to start pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_STARTED;

	return 0;
}

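/* Stop a running pipeline by moving it back to the PAUSED state */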
int skl_stop_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "In %s pipe=%d\n", __func__, pipe->ppl_id);

	/* If the pipe was never paused or started, there is nothing to stop */
	if (pipe->state < SKL_PIPE_PAUSED)
		return 0;

	ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
	if (ret < 0) {
		dev_dbg(ctx->dev, "Failed to stop pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_PAUSED;

	return 0;
}

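/* Reset a paused/started pipeline by setting the RESET pipe state */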
int skl_reset_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	/* only a paused or started pipe can be reset */
	if (pipe->state < SKL_PIPE_PAUSED)
		return 0;

	ret = skl_set_pipe_state(ctx, pipe, PPL_RESET);
	if (ret < 0) {
		dev_dbg(ctx->dev, "Failed to reset pipe ret=%d\n", ret);
		return ret;
	}

	pipe->state = SKL_PIPE_RESET;

	return 0;
}

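/* Set/get module runtime (algo) parameters using large-config IPC messages */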
int skl_set_module_params(struct skl_sst *ctx, u32 *params, int size,
			u32 param_id, struct skl_module_cfg *mcfg)
{
	struct skl_ipc_large_config_msg msg;

	msg.module_id = mcfg->id.module_id;
	msg.instance_id = mcfg->id.pvt_id;
	msg.param_data_size = size;
	msg.large_param_id = param_id;

	return skl_ipc_set_large_config(&ctx->ipc, &msg, params);
}

int skl_get_module_params(struct skl_sst *ctx, u32 *params, int size,
			u32 param_id, struct skl_module_cfg *mcfg)
{
	struct skl_ipc_large_config_msg msg;

	msg.module_id = mcfg->id.module_id;
	msg.instance_id = mcfg->id.pvt_id;
	msg.param_data_size = size;
	msg.large_param_id = param_id;

	return skl_ipc_get_large_config(&ctx->ipc, &msg, params);
}