/*
 * skl-message.c - HDA DSP interface for FW registration, Pipe and Module
 * configurations
 *
 * Copyright (C) 2015 Intel Corp
 * Author: Rafal Redzimski <rafal.f.redzimski@intel.com>
 *	   Jeeja KP <jeeja.kp@intel.com>
 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as version 2, as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 */

#include <linux/slab.h>
#include <linux/pci.h>
#include <sound/core.h>
#include <sound/pcm.h>
#include "skl-sst-dsp.h"
#include "skl-sst-ipc.h"
#include "skl.h"
#include "../common/sst-dsp.h"
#include "../common/sst-dsp-priv.h"
#include "skl-topology.h"
#include "skl-tplg-interface.h"

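/*
 * DMA buffer helpers used by the DSP firmware loader: allocate and free
 * pages through the HDA bus io_ops so the buffer can be used for
 * firmware/library download.
 */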
static int skl_alloc_dma_buf(struct device *dev,
		struct snd_dma_buffer *dmab, size_t size)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	return bus->io_ops->dma_alloc_pages(bus, SNDRV_DMA_TYPE_DEV, size, dmab);
}

static int skl_free_dma_buf(struct device *dev, struct snd_dma_buffer *dmab)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	bus->io_ops->dma_free_pages(bus, dmab);

	return 0;
}

#define NOTIFICATION_PARAM_ID 3
#define NOTIFICATION_MASK 0xf

/* enable/disable notification for underruns/overruns from firmware module */
static void skl_dsp_enable_notification(struct skl_sst *ctx, bool enable)
{
	struct notification_mask mask;
	struct skl_ipc_large_config_msg msg = {0};

	mask.notify = NOTIFICATION_MASK;
	mask.enable = enable;

	msg.large_param_id = NOTIFICATION_PARAM_ID;
	msg.param_data_size = sizeof(mask);

	skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)&mask);
}

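/*
 * Program the SPIB (Software Position In Buffer) register of the host
 * stream used for download, so the controller knows how much valid data
 * sits in the DMA buffer.
 */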
static int skl_dsp_setup_spib(struct device *dev, unsigned int size,
				int stream_tag, int enable)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct hdac_stream *stream = snd_hdac_get_stream(bus,
			SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	struct hdac_ext_stream *estream;

	if (!stream)
		return -EINVAL;

	estream = stream_to_hdac_ext_stream(stream);
	/* enable/disable SPIB for this hdac stream */
	snd_hdac_ext_stream_spbcap_enable(ebus, enable, stream->index);

	/* set the spib value */
	snd_hdac_ext_stream_set_spib(ebus, estream, size);

	return 0;
}

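/*
 * Prepare a host DMA stream for firmware/library download: assign an
 * extended host stream, set up the DMA buffer and SPIB, and return the
 * stream tag so the loader can reference this stream towards the DSP.
 */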
static int skl_dsp_prepare(struct device *dev, unsigned int format,
			unsigned int size, struct snd_dma_buffer *dmab)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct hdac_ext_stream *estream;
	struct hdac_stream *stream;
	struct snd_pcm_substream substream;
	int ret;

	if (!bus)
		return -ENODEV;

	memset(&substream, 0, sizeof(substream));
	substream.stream = SNDRV_PCM_STREAM_PLAYBACK;

	estream = snd_hdac_ext_stream_assign(ebus, &substream,
					HDAC_EXT_STREAM_TYPE_HOST);
	if (!estream)
		return -ENODEV;

	stream = hdac_stream(estream);

	/* assign decoupled host dma channel */
	ret = snd_hdac_dsp_prepare(stream, format, size, dmab);
	if (ret < 0)
		return ret;

	skl_dsp_setup_spib(dev, size, stream->stream_tag, true);

	return stream->stream_tag;
}

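/* Start or stop the loader DMA on the given host stream */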
static int skl_dsp_trigger(struct device *dev, bool start, int stream_tag)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_stream *stream;
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	stream = snd_hdac_get_stream(bus,
		SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	if (!stream)
		return -EINVAL;

	snd_hdac_dsp_trigger(stream, start);

	return 0;
}

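/*
 * Undo skl_dsp_prepare(): disable SPIB, release the host stream and free
 * the DMA pages once the download is complete.
 */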
static int skl_dsp_cleanup(struct device *dev,
		struct snd_dma_buffer *dmab, int stream_tag)
{
	struct hdac_ext_bus *ebus = dev_get_drvdata(dev);
	struct hdac_stream *stream;
	struct hdac_ext_stream *estream;
	struct hdac_bus *bus = ebus_to_hbus(ebus);

	if (!bus)
		return -ENODEV;

	stream = snd_hdac_get_stream(bus,
		SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
	if (!stream)
		return -EINVAL;

	estream = stream_to_hdac_ext_stream(stream);
	skl_dsp_setup_spib(dev, 0, stream_tag, false);
	snd_hdac_ext_stream_release(estream, HDAC_EXT_STREAM_TYPE_HOST);

	snd_hdac_dsp_cleanup(stream, dmab);

	return 0;
}

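/*
 * Loader ops handed to the platform init routines. Skylake/Kabylake only
 * need the DMA buffer helpers (firmware is pushed through the code loader
 * DMA), while Broxton additionally uses prepare/trigger/cleanup to stream
 * the image over an HDA host DMA channel.
 */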
static struct skl_dsp_loader_ops skl_get_loader_ops(void)
{
	struct skl_dsp_loader_ops loader_ops;

	memset(&loader_ops, 0, sizeof(struct skl_dsp_loader_ops));

	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
	loader_ops.free_dma_buf = skl_free_dma_buf;

	return loader_ops;
}

static struct skl_dsp_loader_ops bxt_get_loader_ops(void)
{
	struct skl_dsp_loader_ops loader_ops;

	memset(&loader_ops, 0, sizeof(loader_ops));

	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
	loader_ops.free_dma_buf = skl_free_dma_buf;
	loader_ops.prepare = skl_dsp_prepare;
	loader_ops.trigger = skl_dsp_trigger;
	loader_ops.cleanup = skl_dsp_cleanup;

	return loader_ops;
}

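/*
 * Per-platform DSP ops, keyed by the PCI device ID of the audio
 * controller: 0x9d70 (Skylake-LP) and 0x9d71 (Kabylake-LP) use the SKL
 * init and loader path, 0x5a98 (Broxton-P/Apollo Lake) uses the BXT one.
 */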
static const struct skl_dsp_ops dsp_ops[] = {
	{
		.id = 0x9d70,
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x9d71,
		.loader_ops = skl_get_loader_ops,
		.init = skl_sst_dsp_init,
		.init_fw = skl_sst_init_fw,
		.cleanup = skl_sst_dsp_cleanup
	},
	{
		.id = 0x5a98,
		.loader_ops = bxt_get_loader_ops,
		.init = bxt_sst_dsp_init,
		.init_fw = bxt_sst_init_fw,
		.cleanup = bxt_sst_dsp_cleanup
	},
};

const struct skl_dsp_ops *skl_get_dsp_ops(int pci_id)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(dsp_ops); i++) {
		if (dsp_ops[i].id == pci_id)
			return &dsp_ops[i];
	}

	return NULL;
}

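/*
 * Enable the processing pipe (ppcap) capability and its interrupt, map
 * the ADSP MMIO BAR and call the platform-specific DSP init from dsp_ops,
 * which sets up the SST context and IPC.
 */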
int skl_init_dsp(struct skl *skl)
{
	void __iomem *mmio_base;
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_dsp_loader_ops loader_ops;
	int irq = bus->irq;
	const struct skl_dsp_ops *ops;
	int ret;

	/* enable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);

	/* map the BAR of the ADSP MMIO */
	mmio_base = pci_ioremap_bar(skl->pci, 4);
	if (mmio_base == NULL) {
		dev_err(bus->dev, "ioremap error\n");
		return -ENXIO;
	}

	ops = skl_get_dsp_ops(skl->pci->device);
	if (!ops) {
		iounmap(mmio_base);
		return -EIO;
	}

	loader_ops = ops->loader_ops();
	ret = ops->init(bus->dev, mmio_base, irq,
			skl->fw_name, loader_ops,
			&skl->skl_sst);
	if (ret < 0)
		return ret;

	dev_dbg(bus->dev, "dsp registration status=%d\n", ret);

	return ret;
}

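/*
 * Counterpart of skl_init_dsp(): disable the ppcap interrupt, run the
 * platform cleanup and unmap the ADSP MMIO space.
 */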
int skl_free_dsp(struct skl *skl)
{
	struct hdac_ext_bus *ebus = &skl->ebus;
	struct hdac_bus *bus = ebus_to_hbus(ebus);
	struct skl_sst *ctx = skl->skl_sst;
	const struct skl_dsp_ops *ops;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);

	ops = skl_get_dsp_ops(skl->pci->device);
	if (!ops)
		return -EIO;

	ops->cleanup(bus->dev, ctx);

	if (ctx->dsp->addr.lpe)
		iounmap(ctx->dsp->addr.lpe);

	return 0;
}

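/*
 * Put the DSP to sleep on suspend and turn off the ppcap capability and
 * its interrupt; a no-op when ppcap is not supported.
 */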
int skl_suspend_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	int ret;

	/* if ppcap is not supported return 0 */
	if (!skl->ebus.bus.ppcap)
		return 0;

	ret = skl_dsp_sleep(ctx->dsp);
	if (ret < 0)
		return ret;

	/* disable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, false);
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, false);

	return 0;
}

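/*
 * Resume counterpart: re-enable ppcap. Unless this is the first boot
 * (firmware not downloaded yet), wake the DSP with dynamic clock gating
 * disabled during the download and disable FW notifications again.
 */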
int skl_resume_dsp(struct skl *skl)
{
	struct skl_sst *ctx = skl->skl_sst;
	int ret;

	/* if ppcap is not supported return 0 */
	if (!skl->ebus.bus.ppcap)
		return 0;

	/* enable ppcap interrupt */
	snd_hdac_ext_bus_ppcap_enable(&skl->ebus, true);
	snd_hdac_ext_bus_ppcap_int_enable(&skl->ebus, true);

	/* check if DSP 1st boot is done */
	if (skl->skl_sst->is_first_boot)
		return 0;

	/* disable dynamic clock gating during fw and lib download */
	ctx->enable_miscbdcge(ctx->dev, false);

	ret = skl_dsp_wake(ctx->dsp);
	ctx->enable_miscbdcge(ctx->dev, true);
	if (ret < 0)
		return ret;

	skl_dsp_enable_notification(skl->skl_sst, false);
	return ret;
}

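/* Map a PCM sample width in bits to the firmware bit-depth encoding */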
enum skl_bitdepth skl_get_bit_depth(int params)
{
	switch (params) {
	case 8:
		return SKL_DEPTH_8BIT;

	case 16:
		return SKL_DEPTH_16BIT;

	case 24:
		return SKL_DEPTH_24BIT;

	case 32:
		return SKL_DEPTH_32BIT;

	default:
		return SKL_DEPTH_INVALID;
	}
}

/*
 * Each module in the DSP expects a base module configuration, which
 * consists of PCM format information (calculated in the driver) and
 * resource values read from the widget information passed through the
 * topology binary. This is sent when we create a module with the
 * INIT_INSTANCE IPC message.
 */
static void skl_set_base_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_cfg *base_cfg)
{
	struct skl_module_fmt *format = &mconfig->in_fmt[0];

	base_cfg->audio_fmt.number_of_channels = (u8)format->channels;

	base_cfg->audio_fmt.s_freq = format->s_freq;
	base_cfg->audio_fmt.bit_depth = format->bit_depth;
	base_cfg->audio_fmt.valid_bit_depth = format->valid_bit_depth;
	base_cfg->audio_fmt.ch_cfg = format->ch_cfg;

	dev_dbg(ctx->dev, "bit_depth=%x valid_bd=%x ch_config=%x\n",
			format->bit_depth, format->valid_bit_depth,
			format->ch_cfg);

	base_cfg->audio_fmt.channel_map = format->ch_map;

	base_cfg->audio_fmt.interleaving = format->interleaving_style;

	base_cfg->cps = mconfig->mcps;
	base_cfg->ibs = mconfig->ibs;
	base_cfg->obs = mconfig->obs;
	base_cfg->is_pages = mconfig->mem_pages;
}

/*
 * Copies the copier capabilities into the copier module and updates the
 * copier module config size.
 */
static void skl_copy_copier_caps(struct skl_module_cfg *mconfig,
				struct skl_cpr_cfg *cpr_mconfig)
{
	if (mconfig->formats_config.caps_size == 0)
		return;

	memcpy(cpr_mconfig->gtw_cfg.config_data,
			mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);

	cpr_mconfig->gtw_cfg.config_length =
			(mconfig->formats_config.caps_size) / 4;
}

#define SKL_NON_GATEWAY_CPR_NODE_ID 0xFFFFFFFF
/*
 * Calculate the gateway settings required for the copier module: the type
 * of gateway and the index of the gateway to use.
 */
static u32 skl_get_node_id(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	union skl_connector_node_id node_id = {0};
	union skl_ssp_dma_node ssp_node = {0};
	struct skl_pipe_params *params = mconfig->pipe->p_params;

	switch (mconfig->dev_type) {
	case SKL_DEVICE_BT:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id +
					(mconfig->vbus_id << 3);
		break;

	case SKL_DEVICE_I2S:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
			SKL_DMA_I2S_LINK_INPUT_CLASS;
		ssp_node.dma_node.time_slot_index = mconfig->time_slot;
		ssp_node.dma_node.i2s_instance = mconfig->vbus_id;
		node_id.node.vindex = ssp_node.val;
		break;

	case SKL_DEVICE_DMIC:
		node_id.node.dma_type = SKL_DMA_DMIC_LINK_INPUT_CLASS;
		node_id.node.vindex = mconfig->vbus_id +
					(mconfig->time_slot);
		break;

	case SKL_DEVICE_HDALINK:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_LINK_OUTPUT_CLASS :
			SKL_DMA_HDA_LINK_INPUT_CLASS;
		node_id.node.vindex = params->link_dma_id;
		break;

	case SKL_DEVICE_HDAHOST:
		node_id.node.dma_type =
			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
			SKL_DMA_HDA_HOST_OUTPUT_CLASS :
			SKL_DMA_HDA_HOST_INPUT_CLASS;
		node_id.node.vindex = params->host_dma_id;
		break;

	default:
		node_id.val = 0xFFFFFFFF;
		break;
	}

	return node_id.val;
}

static void skl_setup_cpr_gateway_cfg(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	cpr_mconfig->gtw_cfg.node_id = skl_get_node_id(ctx, mconfig);

	if (cpr_mconfig->gtw_cfg.node_id == SKL_NON_GATEWAY_CPR_NODE_ID) {
		cpr_mconfig->cpr_feature_mask = 0;
		return;
	}

	if (SKL_CONN_SOURCE == mconfig->hw_conn_type)
		cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * mconfig->obs;
	else
		cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * mconfig->ibs;

	cpr_mconfig->cpr_feature_mask = 0;
	cpr_mconfig->gtw_cfg.config_length = 0;

	skl_copy_copier_caps(mconfig, cpr_mconfig);
}

#define DMA_CONTROL_ID 5

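/*
 * Some topology capability blobs carry extra DMA control data on top of
 * the plain I2S config blob; if so, send it to the firmware with the
 * DMA_CONTROL large-config IPC.
 */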
int skl_dsp_set_dma_control(struct skl_sst *ctx, struct skl_module_cfg *mconfig)
{
	struct skl_dma_control *dma_ctrl;
	struct skl_i2s_config_blob config_blob;
	struct skl_ipc_large_config_msg msg = {0};
	int err = 0;

	/*
	 * if the blob size is the same as the capability size, then no dma
	 * control data is present, so return
	 */
	if (mconfig->formats_config.caps_size == sizeof(config_blob))
		return 0;

	msg.large_param_id = DMA_CONTROL_ID;
	msg.param_data_size = sizeof(struct skl_dma_control) +
				mconfig->formats_config.caps_size;

	dma_ctrl = kzalloc(msg.param_data_size, GFP_KERNEL);
	if (dma_ctrl == NULL)
		return -ENOMEM;

	dma_ctrl->node_id = skl_get_node_id(ctx, mconfig);

	/* size in dwords */
	dma_ctrl->config_length = sizeof(config_blob) / 4;

	memcpy(dma_ctrl->config_data, mconfig->formats_config.caps,
				mconfig->formats_config.caps_size);

	err = skl_ipc_set_large_config(&ctx->ipc, &msg, (u32 *)dma_ctrl);

	kfree(dma_ctrl);

	return err;
}

static void skl_setup_out_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_audio_data_format *out_fmt)
{
	struct skl_module_fmt *format = &mconfig->out_fmt[0];

	out_fmt->number_of_channels = (u8)format->channels;
	out_fmt->s_freq = format->s_freq;
	out_fmt->bit_depth = format->bit_depth;
	out_fmt->valid_bit_depth = format->valid_bit_depth;
	out_fmt->ch_cfg = format->ch_cfg;

	out_fmt->channel_map = format->ch_map;
	out_fmt->interleaving = format->interleaving_style;
	out_fmt->sample_type = format->sample_type;

	dev_dbg(ctx->dev, "copier out format chan=%d freq=%d bitdepth=%d\n",
		out_fmt->number_of_channels, format->s_freq, format->bit_depth);
}

/*
 * The DSP needs an SRC module for frequency conversion. SRC takes the
 * base module configuration plus the target frequency, passed as an
 * extra parameter in the src config.
 */
static void skl_set_src_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_src_module_cfg *src_mconfig)
{
	struct skl_module_fmt *fmt = &mconfig->out_fmt[0];

	skl_set_base_module_format(ctx, mconfig,
		(struct skl_base_cfg *)src_mconfig);

	src_mconfig->src_cfg = fmt->s_freq;
}

/*
 * The DSP needs an updown module to do channel conversion. The updown
 * module takes the base module configuration and the channel
 * configuration. It also takes coefficients; the firmware defaults are
 * applied here for now.
 */
static void skl_set_updown_mixer_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_up_down_mixer_cfg *mixer_mconfig)
{
	struct skl_module_fmt *fmt = &mconfig->out_fmt[0];
	int i = 0;

	skl_set_base_module_format(ctx, mconfig,
		(struct skl_base_cfg *)mixer_mconfig);
	mixer_mconfig->out_ch_cfg = fmt->ch_cfg;

	/* Select F/W default coefficient */
	mixer_mconfig->coeff_sel = 0x0;

	/* User coeff, don't care since we are selecting F/W defaults */
	for (i = 0; i < UP_DOWN_MIXER_MAX_COEFF; i++)
		mixer_mconfig->coeff[i] = 0xDEADBEEF;
}

/*
 * The 'copier' is a DSP internal module which copies data from host DMA
 * (HDA host DMA) or a link (HDA link, SSP, PDM).
 * Here we calculate the copier module parameters: PCM format, output
 * format and gateway settings.
 * copier_module_config is sent as the input buffer with the INIT_INSTANCE
 * IPC message.
 */
static void skl_set_copier_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_cpr_cfg *cpr_mconfig)
{
	struct skl_audio_data_format *out_fmt = &cpr_mconfig->out_fmt;
	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)cpr_mconfig;

	skl_set_base_module_format(ctx, mconfig, base_cfg);

	skl_setup_out_format(ctx, mconfig, out_fmt);
	skl_setup_cpr_gateway_cfg(ctx, mconfig, cpr_mconfig);
}

/*
 * Algo modules are DSP pre-processing modules. An algo module takes the
 * base module configuration and its params.
 */
static void skl_set_algo_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_algo_cfg *algo_mcfg)
{
	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)algo_mcfg;

	skl_set_base_module_format(ctx, mconfig, base_cfg);

	if (mconfig->formats_config.caps_size == 0)
		return;

	memcpy(algo_mcfg->params,
			mconfig->formats_config.caps,
			mconfig->formats_config.caps_size);
}

/*
 * The mic select module allows selecting one or many input channels, thus
 * acting as a demux.
 *
 * The mic select module takes the base module configuration and the
 * out-format configuration.
 */
static void skl_set_base_outfmt_format(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig,
			struct skl_base_outfmt_cfg *base_outfmt_mcfg)
{
	struct skl_audio_data_format *out_fmt = &base_outfmt_mcfg->out_fmt;
	struct skl_base_cfg *base_cfg =
				(struct skl_base_cfg *)base_outfmt_mcfg;

	skl_set_base_module_format(ctx, mconfig, base_cfg);
	skl_setup_out_format(ctx, mconfig, out_fmt);
}

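/*
 * Size of the per-module-type config structure sent with INIT_INSTANCE;
 * copier and algo modules carry a variable-size capability blob on top of
 * the fixed part.
 */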
static u16 skl_get_module_param_size(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 param_size;

	switch (mconfig->m_type) {
	case SKL_MODULE_TYPE_COPIER:
		param_size = sizeof(struct skl_cpr_cfg);
		param_size += mconfig->formats_config.caps_size;
		return param_size;

	case SKL_MODULE_TYPE_SRCINT:
		return sizeof(struct skl_src_module_cfg);

	case SKL_MODULE_TYPE_UPDWMIX:
		return sizeof(struct skl_up_down_mixer_cfg);

	case SKL_MODULE_TYPE_ALGO:
		param_size = sizeof(struct skl_base_cfg);
		param_size += mconfig->formats_config.caps_size;
		return param_size;

	case SKL_MODULE_TYPE_BASE_OUTFMT:
	case SKL_MODULE_TYPE_KPB:
		return sizeof(struct skl_base_outfmt_cfg);

	default:
		/*
		 * return only the base cfg when no specific module type is
		 * specified
		 */
		return sizeof(struct skl_base_cfg);
	}

	return 0;
}

/*
 * The DSP firmware supports various modules like copier, SRC, updown etc.
 * These modules require various parameters to be calculated and sent for
 * module initialization to the DSP. By default a generic module needs
 * only the base module format configuration.
 */
static int skl_set_module_format(struct skl_sst *ctx,
			struct skl_module_cfg *module_config,
			u16 *module_config_size,
			void **param_data)
{
	u16 param_size;

	param_size = skl_get_module_param_size(ctx, module_config);

	*param_data = kzalloc(param_size, GFP_KERNEL);
	if (NULL == *param_data)
		return -ENOMEM;

	*module_config_size = param_size;

	switch (module_config->m_type) {
	case SKL_MODULE_TYPE_COPIER:
		skl_set_copier_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_SRCINT:
		skl_set_src_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_UPDWMIX:
		skl_set_updown_mixer_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_ALGO:
		skl_set_algo_format(ctx, module_config, *param_data);
		break;

	case SKL_MODULE_TYPE_BASE_OUTFMT:
	case SKL_MODULE_TYPE_KPB:
		skl_set_base_outfmt_format(ctx, module_config, *param_data);
		break;

	default:
		skl_set_base_module_format(ctx, module_config, *param_data);
		break;
	}

	dev_dbg(ctx->dev, "Module id=%d config size: %d bytes\n",
			module_config->id.module_id, param_size);
	print_hex_dump_debug("Module params:", DUMP_PREFIX_OFFSET, 8, 4,
			*param_data, param_size, false);
	return 0;
}

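/*
 * Look up which pin of a module is connected to the given module
 * instance; returns the pin (queue) index or -EINVAL if not found.
 */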
static int skl_get_queue_index(struct skl_module_pin *mpin,
			struct skl_module_inst_id id, int max)
{
	int i;

	for (i = 0; i < max; i++) {
		if (mpin[i].id.module_id == id.module_id &&
			mpin[i].id.instance_id == id.instance_id)
			return i;
	}

	return -EINVAL;
}

/*
 * Allocates a queue (pin) for a module connection.
 * If dynamic, the pin_index is allocated from 0 to max_pin.
 * If static, the pin_index is fixed based on module_id and instance id.
 */
static int skl_alloc_queue(struct skl_module_pin *mpin,
			struct skl_module_cfg *tgt_cfg, int max)
{
	int i;
	struct skl_module_inst_id id = tgt_cfg->id;

	/*
	 * if the pin is dynamic, find the first free pin; otherwise find
	 * the pin matching the module and instance id, as topology ensures
	 * a unique pin is assigned to it, so there is no need to
	 * allocate/free
	 */
	for (i = 0; i < max; i++) {
		if (mpin[i].is_dynamic) {
			if (!mpin[i].in_use &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].in_use = true;
				mpin[i].id.module_id = id.module_id;
				mpin[i].id.instance_id = id.instance_id;
				mpin[i].id.pvt_id = id.pvt_id;
				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		} else {
			if (mpin[i].id.module_id == id.module_id &&
				mpin[i].id.instance_id == id.instance_id &&
				mpin[i].pin_state == SKL_PIN_UNBIND) {

				mpin[i].tgt_mcfg = tgt_cfg;
				return i;
			}
		}
	}

	return -EINVAL;
}

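/* Release a pin allocated by skl_alloc_queue() and mark it unbound */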
static void skl_free_queue(struct skl_module_pin *mpin, int q_index)
{
	if (mpin[q_index].is_dynamic) {
		mpin[q_index].in_use = false;
		mpin[q_index].id.module_id = 0;
		mpin[q_index].id.instance_id = 0;
		mpin[q_index].id.pvt_id = 0;
	}
	mpin[q_index].pin_state = SKL_PIN_UNBIND;
	mpin[q_index].tgt_mcfg = NULL;
}

/*
 * The module state is set to uninit when all of its output pins are in
 * the UNBIND state.
 */
static void skl_clear_module_state(struct skl_module_pin *mpin, int max,
						struct skl_module_cfg *mcfg)
{
	int i;
	bool found = false;

	for (i = 0; i < max; i++) {
		if (mpin[i].pin_state == SKL_PIN_UNBIND)
			continue;
		found = true;
		break;
	}

	if (!found)
		mcfg->m_state = SKL_MODULE_UNINIT;
}

/*
 * A module needs to be instantiated in the DSP. A module is part of a
 * collection of modules referred to as a PIPE.
 * We first calculate the module format, based on the module type, and
 * then invoke the DSP by sending the INIT_INSTANCE IPC via the IPC
 * helper.
 */
int skl_init_module(struct skl_sst *ctx,
			struct skl_module_cfg *mconfig)
{
	u16 module_config_size = 0;
	void *param_data = NULL;
	int ret;
	struct skl_ipc_init_instance_msg msg;

	dev_dbg(ctx->dev, "%s: module_id = %d instance=%d\n", __func__,
		 mconfig->id.module_id, mconfig->id.pvt_id);

	if (mconfig->pipe->state != SKL_PIPE_CREATED) {
		dev_err(ctx->dev, "Pipe not created state= %d pipe_id= %d\n",
				 mconfig->pipe->state, mconfig->pipe->ppl_id);
		return -EIO;
	}

	ret = skl_set_module_format(ctx, mconfig,
			&module_config_size, &param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to set module format ret=%d\n", ret);
		return ret;
	}

	msg.module_id = mconfig->id.module_id;
	msg.instance_id = mconfig->id.pvt_id;
	msg.ppl_instance_id = mconfig->pipe->ppl_id;
	msg.param_data_size = module_config_size;
	msg.core_id = mconfig->core_id;
	msg.domain = mconfig->domain;

	ret = skl_ipc_init_instance(&ctx->ipc, &msg, param_data);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to init instance ret=%d\n", ret);
		kfree(param_data);
		return ret;
	}
	mconfig->m_state = SKL_MODULE_INIT_DONE;
	kfree(param_data);
	return ret;
}

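/* Debug helper: log module ids, instance ids and states for a bind/unbind pair */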
static void skl_dump_bind_info(struct skl_sst *ctx, struct skl_module_cfg
				*src_module, struct skl_module_cfg *dst_module)
{
	dev_dbg(ctx->dev, "%s: src module_id = %d src_instance=%d\n",
		__func__, src_module->id.module_id, src_module->id.pvt_id);
	dev_dbg(ctx->dev, "%s: dst_module=%d dst_instance=%d\n", __func__,
		 dst_module->id.module_id, dst_module->id.pvt_id);

	dev_dbg(ctx->dev, "src_module state = %d dst module state = %d\n",
		src_module->m_state, dst_module->m_state);
}

/*
 * On module freeup, we need to unbind the module from the modules it is
 * already bound to. Find the pins that were allocated and unbind them
 * using the bind_unbind IPC.
 */
int skl_unbind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	struct skl_module_inst_id src_id = src_mcfg->id;
	struct skl_module_inst_id dst_id = dst_mcfg->id;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index, src_pin_state, dst_pin_state;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	/* get src queue index */
	src_index = skl_get_queue_index(src_mcfg->m_out_pin, dst_id, out_max);
	if (src_index < 0)
		return 0;

	msg.src_queue = src_index;

	/* get dst queue index */
	dst_index = skl_get_queue_index(dst_mcfg->m_in_pin, src_id, in_max);
	if (dst_index < 0)
		return 0;

	msg.dst_queue = dst_index;

	src_pin_state = src_mcfg->m_out_pin[src_index].pin_state;
	dst_pin_state = dst_mcfg->m_in_pin[dst_index].pin_state;

	if (src_pin_state != SKL_PIN_BIND_DONE ||
		dst_pin_state != SKL_PIN_BIND_DONE)
		return 0;

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = false;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);
	if (!ret) {
		/* free queue only if unbind is success */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);

		/*
		 * check only the src module bind state, as bind is
		 * always from src -> sink
		 */
		skl_clear_module_state(src_mcfg->m_out_pin, out_max, src_mcfg);
	}

	return ret;
}

/*
 * Once a module is instantiated it needs to be 'bound' to the other
 * modules in the pipeline. For binding we need to find the module pins
 * that are bound together.
 * This function finds the pins and then sends the bind_unbind IPC message
 * to the DSP using the IPC helper.
 */
int skl_bind_modules(struct skl_sst *ctx,
			struct skl_module_cfg *src_mcfg,
			struct skl_module_cfg *dst_mcfg)
{
	int ret;
	struct skl_ipc_bind_unbind_msg msg;
	int in_max = dst_mcfg->max_in_queue;
	int out_max = src_mcfg->max_out_queue;
	int src_index, dst_index;

	skl_dump_bind_info(ctx, src_mcfg, dst_mcfg);

	if (src_mcfg->m_state < SKL_MODULE_INIT_DONE ||
		dst_mcfg->m_state < SKL_MODULE_INIT_DONE)
		return 0;

	src_index = skl_alloc_queue(src_mcfg->m_out_pin, dst_mcfg, out_max);
	if (src_index < 0)
		return -EINVAL;

	msg.src_queue = src_index;
	dst_index = skl_alloc_queue(dst_mcfg->m_in_pin, src_mcfg, in_max);
	if (dst_index < 0) {
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		return -EINVAL;
	}

	msg.dst_queue = dst_index;

	dev_dbg(ctx->dev, "src queue = %d dst queue = %d\n",
			 msg.src_queue, msg.dst_queue);

	msg.module_id = src_mcfg->id.module_id;
	msg.instance_id = src_mcfg->id.pvt_id;
	msg.dst_module_id = dst_mcfg->id.module_id;
	msg.dst_instance_id = dst_mcfg->id.pvt_id;
	msg.bind = true;

	ret = skl_ipc_bind_unbind(&ctx->ipc, &msg);

	if (!ret) {
		src_mcfg->m_state = SKL_MODULE_BIND_DONE;
		src_mcfg->m_out_pin[src_index].pin_state = SKL_PIN_BIND_DONE;
		dst_mcfg->m_in_pin[dst_index].pin_state = SKL_PIN_BIND_DONE;
	} else {
		/* error case: if the IPC fails, clear the queue index */
		skl_free_queue(src_mcfg->m_out_pin, src_index);
		skl_free_queue(dst_mcfg->m_in_pin, dst_index);
	}

	return ret;
}

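/* Thin wrapper to send the set pipeline state IPC for a pipe */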
static int skl_set_pipe_state(struct skl_sst *ctx, struct skl_pipe *pipe,
			enum skl_ipc_pipeline_state state)
{
	dev_dbg(ctx->dev, "%s: pipe_state = %d\n", __func__, state);

	return skl_ipc_set_pipeline_state(&ctx->ipc, pipe->ppl_id, state);
}

/*
 * A pipeline is a collection of modules. Before a module is instantiated,
 * a pipeline needs to be created for it.
 * This function creates a pipeline by sending the create pipeline IPC
 * message to the FW.
 */
int skl_create_pipeline(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe_id = %d\n", __func__, pipe->ppl_id);

	ret = skl_ipc_create_pipeline(&ctx->ipc, pipe->memory_pages,
				pipe->pipe_priority, pipe->ppl_id);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to create pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_CREATED;

	return 0;
}

/*
 * A pipeline needs to be deleted on cleanup. If a pipeline is running,
 * pause it first and then delete it.
 * The pipe delete is done by sending the delete pipeline IPC; the DSP
 * will stop the DMA engines and release the resources.
 */
int skl_delete_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If the pipe is started, stop it in the FW first */
	if (pipe->state > SKL_PIPE_STARTED) {
		ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
		if (ret < 0) {
			dev_err(ctx->dev, "Failed to stop pipeline\n");
			return ret;
		}

		pipe->state = SKL_PIPE_PAUSED;
	}

	/* If the pipe was not created in the FW, do not try to delete it */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	ret = skl_ipc_delete_pipeline(&ctx->ipc, pipe->ppl_id);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to delete pipeline\n");
		return ret;
	}

	pipe->state = SKL_PIPE_INVALID;

	return ret;
}

/*
 * A pipeline is also a scheduling entity in the DSP which can be run or
 * stopped. For processing data the pipe needs to be run by sending the
 * set pipe state IPC to the DSP.
 */
int skl_run_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);

	/* If the pipe was not created in the FW, do not try to pause or delete */
	if (pipe->state < SKL_PIPE_CREATED)
		return 0;

	/* The pipe has to be paused before it is started */
	ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to pause pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_PAUSED;

	ret = skl_set_pipe_state(ctx, pipe, PPL_RUNNING);
	if (ret < 0) {
		dev_err(ctx->dev, "Failed to start pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_STARTED;

	return 0;
}

/*
 * Stop the pipeline by sending the set pipe state IPC. The DSP doesn't
 * implement stop, so we always send the pause message.
 */
int skl_stop_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	dev_dbg(ctx->dev, "In %s pipe=%d\n", __func__, pipe->ppl_id);

	/* If the pipe was not created in the FW, do not try to pause or delete */
	if (pipe->state < SKL_PIPE_PAUSED)
		return 0;

	ret = skl_set_pipe_state(ctx, pipe, PPL_PAUSED);
	if (ret < 0) {
		dev_dbg(ctx->dev, "Failed to stop pipe\n");
		return ret;
	}

	pipe->state = SKL_PIPE_PAUSED;

	return 0;
}

/*
 * Reset the pipeline by sending the set pipe state IPC; this will reset
 * the DMA from the DSP side.
 */
int skl_reset_pipe(struct skl_sst *ctx, struct skl_pipe *pipe)
{
	int ret;

	/* If the pipe was not created in the FW, do not try to pause or delete */
	if (pipe->state < SKL_PIPE_PAUSED)
		return 0;

	ret = skl_set_pipe_state(ctx, pipe, PPL_RESET);
	if (ret < 0) {
		dev_dbg(ctx->dev, "Failed to reset pipe ret=%d\n", ret);
		return ret;
	}

	pipe->state = SKL_PIPE_RESET;

	return 0;
}

/* Algo parameter set helper function */
int skl_set_module_params(struct skl_sst *ctx, u32 *params, int size,
				u32 param_id, struct skl_module_cfg *mcfg)
{
	struct skl_ipc_large_config_msg msg;

	msg.module_id = mcfg->id.module_id;
	msg.instance_id = mcfg->id.pvt_id;
	msg.param_data_size = size;
	msg.large_param_id = param_id;

	return skl_ipc_set_large_config(&ctx->ipc, &msg, params);
}

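/* Algo parameter get helper function */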
int skl_get_module_params(struct skl_sst *ctx, u32 *params, int size,
			  u32 param_id, struct skl_module_cfg *mcfg)
{
	struct skl_ipc_large_config_msg msg;

	msg.module_id = mcfg->id.module_id;
	msg.instance_id = mcfg->id.pvt_id;
	msg.param_data_size = size;
	msg.large_param_id = param_id;

	return skl_ipc_get_large_config(&ctx->ipc, &msg, params);
}