// SPDX-License-Identifier: GPL-2.0-only
/*
 *  skl-messages.c - HDA DSP interface for FW registration, Pipe and Module
 *  configurations
 *
 *  Copyright (C) 2015 Intel Corp
 *  Author: Rafal Redzimski <rafal.f.redzimski@intel.com>
 *	   Jeeja KP <jeeja.kp@intel.com>
 *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 */
11 
12 #include <linux/slab.h>
13 #include <linux/pci.h>
14 #include <sound/core.h>
15 #include <sound/pcm.h>
16 #include <uapi/sound/skl-tplg-interface.h>
17 #include "skl-sst-dsp.h"
18 #include "cnl-sst-dsp.h"
19 #include "skl-sst-ipc.h"
20 #include "skl.h"
21 #include "../common/sst-dsp.h"
22 #include "../common/sst-dsp-priv.h"
23 #include "skl-topology.h"
24 
static int skl_alloc_dma_buf(struct device *dev,
26 		struct snd_dma_buffer *dmab, size_t size)
27 {
28 	return snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, dev, size, dmab);
29 }
30 
static int skl_free_dma_buf(struct device *dev, struct snd_dma_buffer *dmab)
32 {
33 	snd_dma_free_pages(dmab);
34 	return 0;
35 }
36 
37 #define SKL_ASTATE_PARAM_ID	4
38 
void skl_dsp_set_astate_cfg(struct skl_dev *skl, u32 cnt, void *data)
40 {
41 	struct skl_ipc_large_config_msg	msg = {0};
42 
43 	msg.large_param_id = SKL_ASTATE_PARAM_ID;
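	/*
	 * The payload is the A-state table: a leading count field followed
	 * by 'cnt' skl_astate_param entries, hence the size below.
	 */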
44 	msg.param_data_size = (cnt * sizeof(struct skl_astate_param) +
45 				sizeof(cnt));
46 
47 	skl_ipc_set_large_config(&skl->ipc, &msg, data);
48 }
49 
static int skl_dsp_setup_spib(struct device *dev, unsigned int size,
51 				int stream_tag, int enable)
52 {
53 	struct hdac_bus *bus = dev_get_drvdata(dev);
54 	struct hdac_stream *stream = snd_hdac_get_stream(bus,
55 			SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
56 	struct hdac_ext_stream *estream;
57 
58 	if (!stream)
59 		return -EINVAL;
60 
61 	estream = stream_to_hdac_ext_stream(stream);
62 	/* enable/disable SPIB for this hdac stream */
63 	snd_hdac_ext_stream_spbcap_enable(bus, enable, stream->index);
64 
65 	/* set the spib value */
66 	snd_hdac_ext_stream_set_spib(bus, estream, size);
67 
68 	return 0;
69 }
70 
static int skl_dsp_prepare(struct device *dev, unsigned int format,
72 			unsigned int size, struct snd_dma_buffer *dmab)
73 {
74 	struct hdac_bus *bus = dev_get_drvdata(dev);
75 	struct hdac_ext_stream *estream;
76 	struct hdac_stream *stream;
77 	struct snd_pcm_substream substream;
78 	int ret;
79 
80 	if (!bus)
81 		return -ENODEV;
82 
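	/*
	 * Use a dummy playback substream to grab a host DMA stream for the
	 * firmware/library download path; no real PCM is opened here.
	 */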
83 	memset(&substream, 0, sizeof(substream));
84 	substream.stream = SNDRV_PCM_STREAM_PLAYBACK;
85 
86 	estream = snd_hdac_ext_stream_assign(bus, &substream,
87 					HDAC_EXT_STREAM_TYPE_HOST);
88 	if (!estream)
89 		return -ENODEV;
90 
91 	stream = hdac_stream(estream);
92 
	/* assign a decoupled host DMA channel */
94 	ret = snd_hdac_dsp_prepare(stream, format, size, dmab);
95 	if (ret < 0)
96 		return ret;
97 
98 	skl_dsp_setup_spib(dev, size, stream->stream_tag, true);
99 
100 	return stream->stream_tag;
101 }
102 
static int skl_dsp_trigger(struct device *dev, bool start, int stream_tag)
104 {
105 	struct hdac_bus *bus = dev_get_drvdata(dev);
106 	struct hdac_stream *stream;
107 
108 	if (!bus)
109 		return -ENODEV;
110 
111 	stream = snd_hdac_get_stream(bus,
112 		SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
113 	if (!stream)
114 		return -EINVAL;
115 
116 	snd_hdac_dsp_trigger(stream, start);
117 
118 	return 0;
119 }
120 
static int skl_dsp_cleanup(struct device *dev,
122 		struct snd_dma_buffer *dmab, int stream_tag)
123 {
124 	struct hdac_bus *bus = dev_get_drvdata(dev);
125 	struct hdac_stream *stream;
126 	struct hdac_ext_stream *estream;
127 
128 	if (!bus)
129 		return -ENODEV;
130 
131 	stream = snd_hdac_get_stream(bus,
132 		SNDRV_PCM_STREAM_PLAYBACK, stream_tag);
133 	if (!stream)
134 		return -EINVAL;
135 
136 	estream = stream_to_hdac_ext_stream(stream);
137 	skl_dsp_setup_spib(dev, 0, stream_tag, false);
138 	snd_hdac_ext_stream_release(estream, HDAC_EXT_STREAM_TYPE_HOST);
139 
140 	snd_hdac_dsp_cleanup(stream, dmab);
141 
142 	return 0;
143 }
144 
static struct skl_dsp_loader_ops skl_get_loader_ops(void)
146 {
147 	struct skl_dsp_loader_ops loader_ops;
148 
149 	memset(&loader_ops, 0, sizeof(struct skl_dsp_loader_ops));
150 
151 	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
152 	loader_ops.free_dma_buf = skl_free_dma_buf;
153 
154 	return loader_ops;
}
156 
static struct skl_dsp_loader_ops bxt_get_loader_ops(void)
158 {
159 	struct skl_dsp_loader_ops loader_ops;
160 
161 	memset(&loader_ops, 0, sizeof(loader_ops));
162 
163 	loader_ops.alloc_dma_buf = skl_alloc_dma_buf;
164 	loader_ops.free_dma_buf = skl_free_dma_buf;
165 	loader_ops.prepare = skl_dsp_prepare;
166 	loader_ops.trigger = skl_dsp_trigger;
167 	loader_ops.cleanup = skl_dsp_cleanup;
168 
169 	return loader_ops;
}
171 
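/*
 * Per-platform DSP ops, selected by the PCI device ID of the audio
 * controller (0x9d70/0x9d71 SKL/KBL-LP, 0x5a98 Broxton-P, 0x3198 Gemini
 * Lake, 0x9dc8/0xa348 CNL-LP/-H, 0x02c8/0x06c8 CML-LP/-H).
 */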
172 static const struct skl_dsp_ops dsp_ops[] = {
173 	{
174 		.id = 0x9d70,
175 		.num_cores = 2,
176 		.loader_ops = skl_get_loader_ops,
177 		.init = skl_sst_dsp_init,
178 		.init_fw = skl_sst_init_fw,
179 		.cleanup = skl_sst_dsp_cleanup
180 	},
181 	{
182 		.id = 0x9d71,
183 		.num_cores = 2,
184 		.loader_ops = skl_get_loader_ops,
185 		.init = skl_sst_dsp_init,
186 		.init_fw = skl_sst_init_fw,
187 		.cleanup = skl_sst_dsp_cleanup
188 	},
189 	{
190 		.id = 0x5a98,
191 		.num_cores = 2,
192 		.loader_ops = bxt_get_loader_ops,
193 		.init = bxt_sst_dsp_init,
194 		.init_fw = bxt_sst_init_fw,
195 		.cleanup = bxt_sst_dsp_cleanup
196 	},
197 	{
198 		.id = 0x3198,
199 		.num_cores = 2,
200 		.loader_ops = bxt_get_loader_ops,
201 		.init = bxt_sst_dsp_init,
202 		.init_fw = bxt_sst_init_fw,
203 		.cleanup = bxt_sst_dsp_cleanup
204 	},
205 	{
206 		.id = 0x9dc8,
207 		.num_cores = 4,
208 		.loader_ops = bxt_get_loader_ops,
209 		.init = cnl_sst_dsp_init,
210 		.init_fw = cnl_sst_init_fw,
211 		.cleanup = cnl_sst_dsp_cleanup
212 	},
213 	{
214 		.id = 0xa348,
215 		.num_cores = 4,
216 		.loader_ops = bxt_get_loader_ops,
217 		.init = cnl_sst_dsp_init,
218 		.init_fw = cnl_sst_init_fw,
219 		.cleanup = cnl_sst_dsp_cleanup
220 	},
221 	{
222 		.id = 0x02c8,
223 		.num_cores = 4,
224 		.loader_ops = bxt_get_loader_ops,
225 		.init = cnl_sst_dsp_init,
226 		.init_fw = cnl_sst_init_fw,
227 		.cleanup = cnl_sst_dsp_cleanup
228 	},
229 	{
230 		.id = 0x06c8,
231 		.num_cores = 4,
232 		.loader_ops = bxt_get_loader_ops,
233 		.init = cnl_sst_dsp_init,
234 		.init_fw = cnl_sst_init_fw,
235 		.cleanup = cnl_sst_dsp_cleanup
236 	},
237 };
238 
const struct skl_dsp_ops *skl_get_dsp_ops(int pci_id)
240 {
241 	int i;
242 
243 	for (i = 0; i < ARRAY_SIZE(dsp_ops); i++) {
244 		if (dsp_ops[i].id == pci_id)
245 			return &dsp_ops[i];
246 	}
247 
248 	return NULL;
249 }
250 
int skl_init_dsp(struct skl_dev *skl)
252 {
253 	void __iomem *mmio_base;
254 	struct hdac_bus *bus = skl_to_bus(skl);
255 	struct skl_dsp_loader_ops loader_ops;
256 	int irq = bus->irq;
257 	const struct skl_dsp_ops *ops;
258 	struct skl_dsp_cores *cores;
259 	int ret;
260 
261 	/* enable ppcap interrupt */
262 	snd_hdac_ext_bus_ppcap_enable(bus, true);
263 	snd_hdac_ext_bus_ppcap_int_enable(bus, true);
264 
265 	/* read the BAR of the ADSP MMIO */
266 	mmio_base = pci_ioremap_bar(skl->pci, 4);
267 	if (mmio_base == NULL) {
268 		dev_err(bus->dev, "ioremap error\n");
269 		return -ENXIO;
270 	}
271 
272 	ops = skl_get_dsp_ops(skl->pci->device);
273 	if (!ops) {
274 		ret = -EIO;
275 		goto unmap_mmio;
276 	}
277 
278 	loader_ops = ops->loader_ops();
279 	ret = ops->init(bus->dev, mmio_base, irq,
280 				skl->fw_name, loader_ops,
281 				&skl);
282 
283 	if (ret < 0)
284 		goto unmap_mmio;
285 
286 	skl->dsp_ops = ops;
287 	cores = &skl->cores;
288 	cores->count = ops->num_cores;
289 
290 	cores->state = kcalloc(cores->count, sizeof(*cores->state), GFP_KERNEL);
291 	if (!cores->state) {
292 		ret = -ENOMEM;
293 		goto unmap_mmio;
294 	}
295 
296 	cores->usage_count = kcalloc(cores->count, sizeof(*cores->usage_count),
297 				     GFP_KERNEL);
298 	if (!cores->usage_count) {
299 		ret = -ENOMEM;
300 		goto free_core_state;
301 	}
302 
303 	dev_dbg(bus->dev, "dsp registration status=%d\n", ret);
304 
305 	return 0;
306 
307 free_core_state:
308 	kfree(cores->state);
309 
310 unmap_mmio:
311 	iounmap(mmio_base);
312 
313 	return ret;
314 }
315 
int skl_free_dsp(struct skl_dev *skl)
317 {
318 	struct hdac_bus *bus = skl_to_bus(skl);
319 
320 	/* disable  ppcap interrupt */
321 	snd_hdac_ext_bus_ppcap_int_enable(bus, false);
322 
323 	skl->dsp_ops->cleanup(bus->dev, skl);
324 
325 	kfree(skl->cores.state);
326 	kfree(skl->cores.usage_count);
327 
328 	if (skl->dsp->addr.lpe)
329 		iounmap(skl->dsp->addr.lpe);
330 
331 	return 0;
332 }
333 
/*
 * In the case of "suspend_active", i.e. the Audio IP being active
 * during system suspend, immediately execute any pending D0i3 work
 * before suspending. This is needed for the IP to work in low power
 * mode during system suspend. In the case of normal suspend, cancel
 * any pending D0i3 work.
 */
int skl_suspend_late_dsp(struct skl_dev *skl)
342 {
343 	struct delayed_work *dwork;
344 
345 	if (!skl)
346 		return 0;
347 
348 	dwork = &skl->d0i3.work;
349 
350 	if (dwork->work.func) {
351 		if (skl->supend_active)
352 			flush_delayed_work(dwork);
353 		else
354 			cancel_delayed_work_sync(dwork);
355 	}
356 
357 	return 0;
358 }
359 
int skl_suspend_dsp(struct skl_dev *skl)
361 {
362 	struct hdac_bus *bus = skl_to_bus(skl);
363 	int ret;
364 
365 	/* if ppcap is not supported return 0 */
366 	if (!bus->ppcap)
367 		return 0;
368 
369 	ret = skl_dsp_sleep(skl->dsp);
370 	if (ret < 0)
371 		return ret;
372 
373 	/* disable ppcap interrupt */
374 	snd_hdac_ext_bus_ppcap_int_enable(bus, false);
375 	snd_hdac_ext_bus_ppcap_enable(bus, false);
376 
377 	return 0;
378 }
379 
int skl_resume_dsp(struct skl_dev *skl)
381 {
382 	struct hdac_bus *bus = skl_to_bus(skl);
383 	int ret;
384 
385 	/* if ppcap is not supported return 0 */
386 	if (!bus->ppcap)
387 		return 0;
388 
389 	/* enable ppcap interrupt */
390 	snd_hdac_ext_bus_ppcap_enable(bus, true);
391 	snd_hdac_ext_bus_ppcap_int_enable(bus, true);
392 
	/* if the DSP 1st boot is not done yet, there is nothing to wake */
394 	if (skl->is_first_boot)
395 		return 0;
396 
397 	/*
398 	 * Disable dynamic clock and power gating during firmware
399 	 * and library download
400 	 */
401 	skl->enable_miscbdcge(skl->dev, false);
402 	skl->clock_power_gating(skl->dev, false);
403 
404 	ret = skl_dsp_wake(skl->dsp);
405 	skl->enable_miscbdcge(skl->dev, true);
406 	skl->clock_power_gating(skl->dev, true);
407 	if (ret < 0)
408 		return ret;
409 
410 	if (skl->cfg.astate_cfg != NULL) {
411 		skl_dsp_set_astate_cfg(skl, skl->cfg.astate_cfg->count,
412 					skl->cfg.astate_cfg);
413 	}
414 	return ret;
415 }
416 
enum skl_bitdepth skl_get_bit_depth(int params)
418 {
419 	switch (params) {
420 	case 8:
421 		return SKL_DEPTH_8BIT;
422 
423 	case 16:
424 		return SKL_DEPTH_16BIT;
425 
426 	case 24:
427 		return SKL_DEPTH_24BIT;
428 
429 	case 32:
430 		return SKL_DEPTH_32BIT;
431 
432 	default:
433 		return SKL_DEPTH_INVALID;
434 
435 	}
436 }
437 
/*
 * Each module in the DSP expects a base module configuration, which consists
 * of PCM format information, which we calculate in the driver, and resource
 * values, which are read from the widget information passed through the
 * topology binary. This is sent when we create a module with the
 * INIT_INSTANCE IPC msg.
 */
static void skl_set_base_module_format(struct skl_dev *skl,
445 			struct skl_module_cfg *mconfig,
446 			struct skl_base_cfg *base_cfg)
447 {
448 	struct skl_module *module = mconfig->module;
449 	struct skl_module_res *res = &module->resources[mconfig->res_idx];
450 	struct skl_module_iface *fmt = &module->formats[mconfig->fmt_idx];
451 	struct skl_module_fmt *format = &fmt->inputs[0].fmt;
452 
453 	base_cfg->audio_fmt.number_of_channels = format->channels;
454 
455 	base_cfg->audio_fmt.s_freq = format->s_freq;
456 	base_cfg->audio_fmt.bit_depth = format->bit_depth;
457 	base_cfg->audio_fmt.valid_bit_depth = format->valid_bit_depth;
458 	base_cfg->audio_fmt.ch_cfg = format->ch_cfg;
459 	base_cfg->audio_fmt.sample_type = format->sample_type;
460 
461 	dev_dbg(skl->dev, "bit_depth=%x valid_bd=%x ch_config=%x\n",
462 			format->bit_depth, format->valid_bit_depth,
463 			format->ch_cfg);
464 
465 	base_cfg->audio_fmt.channel_map = format->ch_map;
466 
467 	base_cfg->audio_fmt.interleaving = format->interleaving_style;
468 
469 	base_cfg->cpc = res->cpc;
470 	base_cfg->ibs = res->ibs;
471 	base_cfg->obs = res->obs;
472 	base_cfg->is_pages = res->is_pages;
473 }
474 
static void fill_pin_params(struct skl_audio_data_format *pin_fmt,
476 			    struct skl_module_fmt *format)
477 {
478 	pin_fmt->number_of_channels = format->channels;
479 	pin_fmt->s_freq = format->s_freq;
480 	pin_fmt->bit_depth = format->bit_depth;
481 	pin_fmt->valid_bit_depth = format->valid_bit_depth;
482 	pin_fmt->ch_cfg = format->ch_cfg;
483 	pin_fmt->sample_type = format->sample_type;
484 	pin_fmt->channel_map = format->ch_map;
485 	pin_fmt->interleaving = format->interleaving_style;
486 }
487 
/*
 * Any module configuration begins with a base module configuration but
 * can be followed by a generic extension containing the audio format for
 * all of the module's pins that are in use.
 */
static void skl_set_base_ext_module_format(struct skl_dev *skl,
494 					   struct skl_module_cfg *mconfig,
495 					   struct skl_base_cfg_ext *base_cfg_ext)
496 {
497 	struct skl_module *module = mconfig->module;
498 	struct skl_module_pin_resources *pin_res;
499 	struct skl_module_iface *fmt = &module->formats[mconfig->fmt_idx];
500 	struct skl_module_res *res = &module->resources[mconfig->res_idx];
501 	struct skl_module_fmt *format;
502 	struct skl_pin_format *pin_fmt;
503 	char *params;
504 	int i;
505 
506 	base_cfg_ext->nr_input_pins = res->nr_input_pins;
507 	base_cfg_ext->nr_output_pins = res->nr_output_pins;
508 	base_cfg_ext->priv_param_length =
509 		mconfig->formats_config[SKL_PARAM_INIT].caps_size;
510 
511 	for (i = 0; i < res->nr_input_pins; i++) {
512 		pin_res = &res->input[i];
513 		pin_fmt = &base_cfg_ext->pins_fmt[i];
514 
515 		pin_fmt->pin_idx = pin_res->pin_index;
516 		pin_fmt->buf_size = pin_res->buf_size;
517 
518 		format = &fmt->inputs[pin_res->pin_index].fmt;
519 		fill_pin_params(&pin_fmt->audio_fmt, format);
520 	}
521 
522 	for (i = 0; i < res->nr_output_pins; i++) {
523 		pin_res = &res->output[i];
524 		pin_fmt = &base_cfg_ext->pins_fmt[res->nr_input_pins + i];
525 
526 		pin_fmt->pin_idx = pin_res->pin_index;
527 		pin_fmt->buf_size = pin_res->buf_size;
528 
529 		format = &fmt->outputs[pin_res->pin_index].fmt;
530 		fill_pin_params(&pin_fmt->audio_fmt, format);
531 	}
532 
533 	if (!base_cfg_ext->priv_param_length)
534 		return;
535 
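	/* the private (caps) data follows the pin format array in the payload */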
536 	params = (char *)base_cfg_ext + sizeof(struct skl_base_cfg_ext);
537 	params += (base_cfg_ext->nr_input_pins + base_cfg_ext->nr_output_pins) *
538 		  sizeof(struct skl_pin_format);
539 
540 	memcpy(params, mconfig->formats_config[SKL_PARAM_INIT].caps,
541 	       mconfig->formats_config[SKL_PARAM_INIT].caps_size);
542 }
543 
544 /*
545  * Copies copier capabilities into copier module and updates copier module
546  * config size.
547  */
static void skl_copy_copier_caps(struct skl_module_cfg *mconfig,
549 				struct skl_cpr_cfg *cpr_mconfig)
550 {
551 	if (mconfig->formats_config[SKL_PARAM_INIT].caps_size == 0)
552 		return;
553 
554 	memcpy(cpr_mconfig->gtw_cfg.config_data,
555 			mconfig->formats_config[SKL_PARAM_INIT].caps,
556 			mconfig->formats_config[SKL_PARAM_INIT].caps_size);
557 
558 	cpr_mconfig->gtw_cfg.config_length =
559 			(mconfig->formats_config[SKL_PARAM_INIT].caps_size) / 4;
560 }
561 
562 #define SKL_NON_GATEWAY_CPR_NODE_ID 0xFFFFFFFF
/*
 * Calculate the gateway settings required for the copier module: the type
 * of gateway and the index of the gateway to use.
 */
static u32 skl_get_node_id(struct skl_dev *skl,
568 			struct skl_module_cfg *mconfig)
569 {
570 	union skl_connector_node_id node_id = {0};
571 	union skl_ssp_dma_node ssp_node  = {0};
572 	struct skl_pipe_params *params = mconfig->pipe->p_params;
573 
574 	switch (mconfig->dev_type) {
575 	case SKL_DEVICE_BT:
576 		node_id.node.dma_type =
577 			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
578 			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
579 			SKL_DMA_I2S_LINK_INPUT_CLASS;
580 		node_id.node.vindex = params->host_dma_id +
581 					(mconfig->vbus_id << 3);
582 		break;
583 
584 	case SKL_DEVICE_I2S:
585 		node_id.node.dma_type =
586 			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
587 			SKL_DMA_I2S_LINK_OUTPUT_CLASS :
588 			SKL_DMA_I2S_LINK_INPUT_CLASS;
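		/* vindex packs the SSP instance and TDM time slot via ssp_node */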
589 		ssp_node.dma_node.time_slot_index = mconfig->time_slot;
590 		ssp_node.dma_node.i2s_instance = mconfig->vbus_id;
591 		node_id.node.vindex = ssp_node.val;
592 		break;
593 
594 	case SKL_DEVICE_DMIC:
595 		node_id.node.dma_type = SKL_DMA_DMIC_LINK_INPUT_CLASS;
596 		node_id.node.vindex = mconfig->vbus_id +
597 					 (mconfig->time_slot);
598 		break;
599 
600 	case SKL_DEVICE_HDALINK:
601 		node_id.node.dma_type =
602 			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
603 			SKL_DMA_HDA_LINK_OUTPUT_CLASS :
604 			SKL_DMA_HDA_LINK_INPUT_CLASS;
605 		node_id.node.vindex = params->link_dma_id;
606 		break;
607 
608 	case SKL_DEVICE_HDAHOST:
609 		node_id.node.dma_type =
610 			(SKL_CONN_SOURCE == mconfig->hw_conn_type) ?
611 			SKL_DMA_HDA_HOST_OUTPUT_CLASS :
612 			SKL_DMA_HDA_HOST_INPUT_CLASS;
613 		node_id.node.vindex = params->host_dma_id;
614 		break;
615 
616 	default:
617 		node_id.val = 0xFFFFFFFF;
618 		break;
619 	}
620 
621 	return node_id.val;
622 }
623 
static void skl_setup_cpr_gateway_cfg(struct skl_dev *skl,
625 			struct skl_module_cfg *mconfig,
626 			struct skl_cpr_cfg *cpr_mconfig)
627 {
628 	u32 dma_io_buf;
629 	struct skl_module_res *res;
630 	int res_idx = mconfig->res_idx;
631 
632 	cpr_mconfig->gtw_cfg.node_id = skl_get_node_id(skl, mconfig);
633 
634 	if (cpr_mconfig->gtw_cfg.node_id == SKL_NON_GATEWAY_CPR_NODE_ID) {
635 		cpr_mconfig->cpr_feature_mask = 0;
636 		return;
637 	}
638 
639 	if (skl->nr_modules) {
640 		res = &mconfig->module->resources[mconfig->res_idx];
641 		cpr_mconfig->gtw_cfg.dma_buffer_size = res->dma_buffer_size;
642 		goto skip_buf_size_calc;
643 	} else {
644 		res = &mconfig->module->resources[res_idx];
645 	}
646 
647 	switch (mconfig->hw_conn_type) {
648 	case SKL_CONN_SOURCE:
649 		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
650 			dma_io_buf =  res->ibs;
651 		else
652 			dma_io_buf =  res->obs;
653 		break;
654 
655 	case SKL_CONN_SINK:
656 		if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
657 			dma_io_buf =  res->obs;
658 		else
659 			dma_io_buf =  res->ibs;
660 		break;
661 
662 	default:
663 		dev_warn(skl->dev, "wrong connection type: %d\n",
664 				mconfig->hw_conn_type);
665 		return;
666 	}
667 
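	/*
	 * dma_buffer_size from topology is expressed in ms and ibs/obs are
	 * per-1ms byte counts, so the product gives the buffer size in bytes
	 * (see the 2ms fallback below).
	 */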
668 	cpr_mconfig->gtw_cfg.dma_buffer_size =
669 				mconfig->dma_buffer_size * dma_io_buf;
670 
671 	/* fallback to 2ms default value */
672 	if (!cpr_mconfig->gtw_cfg.dma_buffer_size) {
673 		if (mconfig->hw_conn_type == SKL_CONN_SOURCE)
674 			cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * res->obs;
675 		else
676 			cpr_mconfig->gtw_cfg.dma_buffer_size = 2 * res->ibs;
677 	}
678 
679 skip_buf_size_calc:
680 	cpr_mconfig->cpr_feature_mask = 0;
681 	cpr_mconfig->gtw_cfg.config_length  = 0;
682 
683 	skl_copy_copier_caps(mconfig, cpr_mconfig);
684 }
685 
686 #define DMA_CONTROL_ID 5
687 #define DMA_I2S_BLOB_SIZE 21
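/* DMA_I2S_BLOB_SIZE is in dwords, matching the config_length set below */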
688 
int skl_dsp_set_dma_control(struct skl_dev *skl, u32 *caps,
690 				u32 caps_size, u32 node_id)
691 {
692 	struct skl_dma_control *dma_ctrl;
693 	struct skl_ipc_large_config_msg msg = {0};
694 	int err = 0;
695 
696 
	/* if the blob size is zero, then return */
700 	if (caps_size == 0)
701 		return 0;
702 
703 	msg.large_param_id = DMA_CONTROL_ID;
704 	msg.param_data_size = sizeof(struct skl_dma_control) + caps_size;
705 
706 	dma_ctrl = kzalloc(msg.param_data_size, GFP_KERNEL);
707 	if (dma_ctrl == NULL)
708 		return -ENOMEM;
709 
710 	dma_ctrl->node_id = node_id;
711 
	/*
	 * The NHLT blob may contain additional configs along with the i2s
	 * blob. Firmware expects only the i2s blob size (in dwords) as the
	 * config_length, so fix it to the i2s blob size.
	 */
718 	dma_ctrl->config_length = DMA_I2S_BLOB_SIZE;
719 
720 	memcpy(dma_ctrl->config_data, caps, caps_size);
721 
722 	err = skl_ipc_set_large_config(&skl->ipc, &msg, (u32 *)dma_ctrl);
723 
724 	kfree(dma_ctrl);
725 	return err;
726 }
727 EXPORT_SYMBOL_GPL(skl_dsp_set_dma_control);
728 
static void skl_setup_out_format(struct skl_dev *skl,
730 			struct skl_module_cfg *mconfig,
731 			struct skl_audio_data_format *out_fmt)
732 {
733 	struct skl_module *module = mconfig->module;
734 	struct skl_module_iface *fmt = &module->formats[mconfig->fmt_idx];
735 	struct skl_module_fmt *format = &fmt->outputs[0].fmt;
736 
737 	out_fmt->number_of_channels = (u8)format->channels;
738 	out_fmt->s_freq = format->s_freq;
739 	out_fmt->bit_depth = format->bit_depth;
740 	out_fmt->valid_bit_depth = format->valid_bit_depth;
741 	out_fmt->ch_cfg = format->ch_cfg;
742 
743 	out_fmt->channel_map = format->ch_map;
744 	out_fmt->interleaving = format->interleaving_style;
745 	out_fmt->sample_type = format->sample_type;
746 
747 	dev_dbg(skl->dev, "copier out format chan=%d fre=%d bitdepth=%d\n",
748 		out_fmt->number_of_channels, format->s_freq, format->bit_depth);
749 }
750 
/*
 * The DSP needs the SRC module for frequency conversion. SRC takes the base
 * module configuration plus the target frequency as an extra parameter,
 * passed as the src config.
 */
static void skl_set_src_format(struct skl_dev *skl,
757 			struct skl_module_cfg *mconfig,
758 			struct skl_src_module_cfg *src_mconfig)
759 {
760 	struct skl_module *module = mconfig->module;
761 	struct skl_module_iface *iface = &module->formats[mconfig->fmt_idx];
762 	struct skl_module_fmt *fmt = &iface->outputs[0].fmt;
763 
764 	skl_set_base_module_format(skl, mconfig,
765 		(struct skl_base_cfg *)src_mconfig);
766 
767 	src_mconfig->src_cfg = fmt->s_freq;
768 }
769 
/*
 * The DSP needs the updown module to do channel conversion. The updown
 * module takes the base module configuration and a channel configuration.
 * It also takes coefficients; for now default coefficients are applied here.
 */
static void skl_set_updown_mixer_format(struct skl_dev *skl,
776 			struct skl_module_cfg *mconfig,
777 			struct skl_up_down_mixer_cfg *mixer_mconfig)
778 {
779 	struct skl_module *module = mconfig->module;
780 	struct skl_module_iface *iface = &module->formats[mconfig->fmt_idx];
781 	struct skl_module_fmt *fmt = &iface->outputs[0].fmt;
782 
783 	skl_set_base_module_format(skl,	mconfig,
784 		(struct skl_base_cfg *)mixer_mconfig);
785 	mixer_mconfig->out_ch_cfg = fmt->ch_cfg;
786 	mixer_mconfig->ch_map = fmt->ch_map;
787 }
788 
/*
 * 'copier' is a DSP internal module which copies data from host DMA (HDA
 * host DMA) or link (HDA link, SSP, PDM).
 * Here we calculate the copier module parameters, like PCM format, output
 * format and gateway settings.
 * copier_module_config is sent as the input buffer with the INIT_INSTANCE
 * IPC msg.
 */
static void skl_set_copier_format(struct skl_dev *skl,
797 			struct skl_module_cfg *mconfig,
798 			struct skl_cpr_cfg *cpr_mconfig)
799 {
800 	struct skl_audio_data_format *out_fmt = &cpr_mconfig->out_fmt;
801 	struct skl_base_cfg *base_cfg = (struct skl_base_cfg *)cpr_mconfig;
802 
803 	skl_set_base_module_format(skl, mconfig, base_cfg);
804 
805 	skl_setup_out_format(skl, mconfig, out_fmt);
806 	skl_setup_cpr_gateway_cfg(skl, mconfig, cpr_mconfig);
807 }
808 
/*
 * The mic select module allows selecting one or many input channels, thus
 * acting as a demux.
 *
 * The mic select module takes the base module configuration and an
 * out-format configuration.
 */
static void skl_set_base_outfmt_format(struct skl_dev *skl,
817 			struct skl_module_cfg *mconfig,
818 			struct skl_base_outfmt_cfg *base_outfmt_mcfg)
819 {
820 	struct skl_audio_data_format *out_fmt = &base_outfmt_mcfg->out_fmt;
821 	struct skl_base_cfg *base_cfg =
822 				(struct skl_base_cfg *)base_outfmt_mcfg;
823 
824 	skl_set_base_module_format(skl, mconfig, base_cfg);
825 	skl_setup_out_format(skl, mconfig, out_fmt);
826 }
827 
static u16 skl_get_module_param_size(struct skl_dev *skl,
829 			struct skl_module_cfg *mconfig)
830 {
831 	struct skl_module_res *res;
832 	struct skl_module *module = mconfig->module;
833 	u16 param_size;
834 
835 	switch (mconfig->m_type) {
836 	case SKL_MODULE_TYPE_COPIER:
837 		param_size = sizeof(struct skl_cpr_cfg);
838 		param_size += mconfig->formats_config[SKL_PARAM_INIT].caps_size;
839 		return param_size;
840 
841 	case SKL_MODULE_TYPE_SRCINT:
842 		return sizeof(struct skl_src_module_cfg);
843 
844 	case SKL_MODULE_TYPE_UPDWMIX:
845 		return sizeof(struct skl_up_down_mixer_cfg);
846 
847 	case SKL_MODULE_TYPE_BASE_OUTFMT:
848 	case SKL_MODULE_TYPE_MIC_SELECT:
849 		return sizeof(struct skl_base_outfmt_cfg);
850 
851 	case SKL_MODULE_TYPE_MIXER:
852 	case SKL_MODULE_TYPE_KPB:
853 		return sizeof(struct skl_base_cfg);
854 
855 	case SKL_MODULE_TYPE_ALGO:
856 	default:
857 		res = &module->resources[mconfig->res_idx];
858 
859 		param_size = sizeof(struct skl_base_cfg) + sizeof(struct skl_base_cfg_ext);
860 		param_size += (res->nr_input_pins + res->nr_output_pins) *
861 			      sizeof(struct skl_pin_format);
862 		param_size += mconfig->formats_config[SKL_PARAM_INIT].caps_size;
863 
864 		return param_size;
865 	}
866 
867 	return 0;
868 }
869 
/*
 * DSP firmware supports various modules like copier, SRC, updown etc.
 * These modules require various parameters to be calculated and sent for
 * module initialization to the DSP. By default a generic module needs only
 * the base module format configuration.
 */
876 
static int skl_set_module_format(struct skl_dev *skl,
878 			struct skl_module_cfg *module_config,
879 			u16 *module_config_size,
880 			void **param_data)
881 {
882 	u16 param_size;
883 
884 	param_size  = skl_get_module_param_size(skl, module_config);
885 
886 	*param_data = kzalloc(param_size, GFP_KERNEL);
887 	if (NULL == *param_data)
888 		return -ENOMEM;
889 
890 	*module_config_size = param_size;
891 
892 	switch (module_config->m_type) {
893 	case SKL_MODULE_TYPE_COPIER:
894 		skl_set_copier_format(skl, module_config, *param_data);
895 		break;
896 
897 	case SKL_MODULE_TYPE_SRCINT:
898 		skl_set_src_format(skl, module_config, *param_data);
899 		break;
900 
901 	case SKL_MODULE_TYPE_UPDWMIX:
902 		skl_set_updown_mixer_format(skl, module_config, *param_data);
903 		break;
904 
905 	case SKL_MODULE_TYPE_BASE_OUTFMT:
906 	case SKL_MODULE_TYPE_MIC_SELECT:
907 		skl_set_base_outfmt_format(skl, module_config, *param_data);
908 		break;
909 
910 	case SKL_MODULE_TYPE_MIXER:
911 	case SKL_MODULE_TYPE_KPB:
912 		skl_set_base_module_format(skl, module_config, *param_data);
913 		break;
914 
915 	case SKL_MODULE_TYPE_ALGO:
916 	default:
917 		skl_set_base_module_format(skl, module_config, *param_data);
918 		skl_set_base_ext_module_format(skl, module_config,
919 					       *param_data +
920 					       sizeof(struct skl_base_cfg));
921 		break;
922 	}
923 
924 	dev_dbg(skl->dev, "Module type=%d id=%d config size: %d bytes\n",
925 			module_config->m_type, module_config->id.module_id,
926 			param_size);
927 	print_hex_dump_debug("Module params:", DUMP_PREFIX_OFFSET, 8, 4,
928 			*param_data, param_size, false);
929 	return 0;
930 }
931 
static int skl_get_queue_index(struct skl_module_pin *mpin,
933 				struct skl_module_inst_id id, int max)
934 {
935 	int i;
936 
937 	for (i = 0; i < max; i++)  {
938 		if (mpin[i].id.module_id == id.module_id &&
939 			mpin[i].id.instance_id == id.instance_id)
940 			return i;
941 	}
942 
943 	return -EINVAL;
944 }
945 
/*
 * Allocates a queue for each module.
 * If dynamic, the pin_index is allocated from 0 to max_pin.
 * If static, the pin_index is fixed based on module_id and instance id.
 */
static int skl_alloc_queue(struct skl_module_pin *mpin,
952 			struct skl_module_cfg *tgt_cfg, int max)
953 {
954 	int i;
955 	struct skl_module_inst_id id = tgt_cfg->id;
	/*
	 * If the pin is dynamic, find the first free pin; otherwise find the
	 * pin matching the module and instance id, as the topology will
	 * ensure a unique pin is assigned to it, so there is no need to
	 * allocate/free.
	 */
962 	for (i = 0; i < max; i++)  {
963 		if (mpin[i].is_dynamic) {
964 			if (!mpin[i].in_use &&
965 				mpin[i].pin_state == SKL_PIN_UNBIND) {
966 
967 				mpin[i].in_use = true;
968 				mpin[i].id.module_id = id.module_id;
969 				mpin[i].id.instance_id = id.instance_id;
970 				mpin[i].id.pvt_id = id.pvt_id;
971 				mpin[i].tgt_mcfg = tgt_cfg;
972 				return i;
973 			}
974 		} else {
975 			if (mpin[i].id.module_id == id.module_id &&
976 				mpin[i].id.instance_id == id.instance_id &&
977 				mpin[i].pin_state == SKL_PIN_UNBIND) {
978 
979 				mpin[i].tgt_mcfg = tgt_cfg;
980 				return i;
981 			}
982 		}
983 	}
984 
985 	return -EINVAL;
986 }
987 
static void skl_free_queue(struct skl_module_pin *mpin, int q_index)
989 {
990 	if (mpin[q_index].is_dynamic) {
991 		mpin[q_index].in_use = false;
992 		mpin[q_index].id.module_id = 0;
993 		mpin[q_index].id.instance_id = 0;
994 		mpin[q_index].id.pvt_id = 0;
995 	}
996 	mpin[q_index].pin_state = SKL_PIN_UNBIND;
997 	mpin[q_index].tgt_mcfg = NULL;
998 }
999 
/* Module state will be set to uninit if all the out pin states are UNBIND */
1001 
static void skl_clear_module_state(struct skl_module_pin *mpin, int max,
1003 						struct skl_module_cfg *mcfg)
1004 {
1005 	int i;
1006 	bool found = false;
1007 
1008 	for (i = 0; i < max; i++)  {
1009 		if (mpin[i].pin_state == SKL_PIN_UNBIND)
1010 			continue;
1011 		found = true;
1012 		break;
1013 	}
1014 
1015 	if (!found)
1016 		mcfg->m_state = SKL_MODULE_INIT_DONE;
1017 	return;
1018 }
1019 
/*
 * A module needs to be instantiated in the DSP. A module is part of a
 * collection of modules referred to as a PIPE.
 * We first calculate the module format, based on the module type, and then
 * invoke the DSP by sending the IPC INIT_INSTANCE using the ipc helper.
 */
int skl_init_module(struct skl_dev *skl,
1027 			struct skl_module_cfg *mconfig)
1028 {
1029 	u16 module_config_size = 0;
1030 	void *param_data = NULL;
1031 	int ret;
1032 	struct skl_ipc_init_instance_msg msg;
1033 
1034 	dev_dbg(skl->dev, "%s: module_id = %d instance=%d\n", __func__,
1035 		 mconfig->id.module_id, mconfig->id.pvt_id);
1036 
1037 	if (mconfig->pipe->state != SKL_PIPE_CREATED) {
1038 		dev_err(skl->dev, "Pipe not created state= %d pipe_id= %d\n",
1039 				 mconfig->pipe->state, mconfig->pipe->ppl_id);
1040 		return -EIO;
1041 	}
1042 
1043 	ret = skl_set_module_format(skl, mconfig,
1044 			&module_config_size, &param_data);
1045 	if (ret < 0) {
1046 		dev_err(skl->dev, "Failed to set module format ret=%d\n", ret);
1047 		return ret;
1048 	}
1049 
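	/* the FW-visible module instance id is the driver-assigned pvt_id */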
1050 	msg.module_id = mconfig->id.module_id;
1051 	msg.instance_id = mconfig->id.pvt_id;
1052 	msg.ppl_instance_id = mconfig->pipe->ppl_id;
1053 	msg.param_data_size = module_config_size;
1054 	msg.core_id = mconfig->core_id;
1055 	msg.domain = mconfig->domain;
1056 
1057 	ret = skl_ipc_init_instance(&skl->ipc, &msg, param_data);
1058 	if (ret < 0) {
1059 		dev_err(skl->dev, "Failed to init instance ret=%d\n", ret);
1060 		kfree(param_data);
1061 		return ret;
1062 	}
1063 	mconfig->m_state = SKL_MODULE_INIT_DONE;
1064 	kfree(param_data);
1065 	return ret;
1066 }
1067 
static void skl_dump_bind_info(struct skl_dev *skl, struct skl_module_cfg
1069 	*src_module, struct skl_module_cfg *dst_module)
1070 {
1071 	dev_dbg(skl->dev, "%s: src module_id = %d  src_instance=%d\n",
1072 		__func__, src_module->id.module_id, src_module->id.pvt_id);
1073 	dev_dbg(skl->dev, "%s: dst_module=%d dst_instance=%d\n", __func__,
1074 		 dst_module->id.module_id, dst_module->id.pvt_id);
1075 
1076 	dev_dbg(skl->dev, "src_module state = %d dst module state = %d\n",
1077 		src_module->m_state, dst_module->m_state);
1078 }
1079 
/*
 * On module freeup, we need to unbind the module from the modules it is
 * already bound to.
 * Find the pins allocated and unbind them using the bind_unbind IPC.
 */
int skl_unbind_modules(struct skl_dev *skl,
1086 			struct skl_module_cfg *src_mcfg,
1087 			struct skl_module_cfg *dst_mcfg)
1088 {
1089 	int ret;
1090 	struct skl_ipc_bind_unbind_msg msg;
1091 	struct skl_module_inst_id src_id = src_mcfg->id;
1092 	struct skl_module_inst_id dst_id = dst_mcfg->id;
1093 	int in_max = dst_mcfg->module->max_input_pins;
1094 	int out_max = src_mcfg->module->max_output_pins;
1095 	int src_index, dst_index, src_pin_state, dst_pin_state;
1096 
1097 	skl_dump_bind_info(skl, src_mcfg, dst_mcfg);
1098 
1099 	/* get src queue index */
1100 	src_index = skl_get_queue_index(src_mcfg->m_out_pin, dst_id, out_max);
1101 	if (src_index < 0)
1102 		return 0;
1103 
1104 	msg.src_queue = src_index;
1105 
1106 	/* get dst queue index */
1107 	dst_index  = skl_get_queue_index(dst_mcfg->m_in_pin, src_id, in_max);
1108 	if (dst_index < 0)
1109 		return 0;
1110 
1111 	msg.dst_queue = dst_index;
1112 
1113 	src_pin_state = src_mcfg->m_out_pin[src_index].pin_state;
1114 	dst_pin_state = dst_mcfg->m_in_pin[dst_index].pin_state;
1115 
1116 	if (src_pin_state != SKL_PIN_BIND_DONE ||
1117 		dst_pin_state != SKL_PIN_BIND_DONE)
1118 		return 0;
1119 
1120 	msg.module_id = src_mcfg->id.module_id;
1121 	msg.instance_id = src_mcfg->id.pvt_id;
1122 	msg.dst_module_id = dst_mcfg->id.module_id;
1123 	msg.dst_instance_id = dst_mcfg->id.pvt_id;
1124 	msg.bind = false;
1125 
1126 	ret = skl_ipc_bind_unbind(&skl->ipc, &msg);
1127 	if (!ret) {
1128 		/* free queue only if unbind is success */
1129 		skl_free_queue(src_mcfg->m_out_pin, src_index);
1130 		skl_free_queue(dst_mcfg->m_in_pin, dst_index);
1131 
		/*
		 * check/clear only the src module bind state, as bind is
		 * always from src -> sink
		 */
1136 		skl_clear_module_state(src_mcfg->m_out_pin, out_max, src_mcfg);
1137 	}
1138 
1139 	return ret;
1140 }
1141 
1142 #define CPR_SINK_FMT_PARAM_ID 2
1143 
/*
 * Once a module is instantiated it needs to be 'bound' to other modules in
 * the pipeline. For binding we need to find the module pins which are to be
 * bound together.
 * This function finds the pins and then sends the bind_unbind IPC message
 * to the DSP using the IPC helper.
 */
int skl_bind_modules(struct skl_dev *skl,
1152 			struct skl_module_cfg *src_mcfg,
1153 			struct skl_module_cfg *dst_mcfg)
1154 {
1155 	int ret = 0;
1156 	struct skl_ipc_bind_unbind_msg msg;
1157 	int in_max = dst_mcfg->module->max_input_pins;
1158 	int out_max = src_mcfg->module->max_output_pins;
1159 	int src_index, dst_index;
1160 	struct skl_module_fmt *format;
1161 	struct skl_cpr_pin_fmt pin_fmt;
1162 	struct skl_module *module;
1163 	struct skl_module_iface *fmt;
1164 
1165 	skl_dump_bind_info(skl, src_mcfg, dst_mcfg);
1166 
1167 	if (src_mcfg->m_state < SKL_MODULE_INIT_DONE ||
1168 		dst_mcfg->m_state < SKL_MODULE_INIT_DONE)
1169 		return 0;
1170 
1171 	src_index = skl_alloc_queue(src_mcfg->m_out_pin, dst_mcfg, out_max);
1172 	if (src_index < 0)
1173 		return -EINVAL;
1174 
1175 	msg.src_queue = src_index;
1176 	dst_index = skl_alloc_queue(dst_mcfg->m_in_pin, src_mcfg, in_max);
1177 	if (dst_index < 0) {
1178 		skl_free_queue(src_mcfg->m_out_pin, src_index);
1179 		return -EINVAL;
1180 	}
1181 
	/*
	 * The copier module requires a separate large_config_set IPC to
	 * configure the pins other than 0.
	 */
1186 	if (src_mcfg->m_type == SKL_MODULE_TYPE_COPIER && src_index > 0) {
1187 		pin_fmt.sink_id = src_index;
1188 		module = src_mcfg->module;
1189 		fmt = &module->formats[src_mcfg->fmt_idx];
1190 
1191 		/* Input fmt is same as that of src module input cfg */
1192 		format = &fmt->inputs[0].fmt;
1193 		fill_pin_params(&(pin_fmt.src_fmt), format);
1194 
1195 		format = &fmt->outputs[src_index].fmt;
1196 		fill_pin_params(&(pin_fmt.dst_fmt), format);
1197 		ret = skl_set_module_params(skl, (void *)&pin_fmt,
1198 					sizeof(struct skl_cpr_pin_fmt),
1199 					CPR_SINK_FMT_PARAM_ID, src_mcfg);
1200 
1201 		if (ret < 0)
1202 			goto out;
1203 	}
1204 
1205 	msg.dst_queue = dst_index;
1206 
1207 	dev_dbg(skl->dev, "src queue = %d dst queue =%d\n",
1208 			 msg.src_queue, msg.dst_queue);
1209 
1210 	msg.module_id = src_mcfg->id.module_id;
1211 	msg.instance_id = src_mcfg->id.pvt_id;
1212 	msg.dst_module_id = dst_mcfg->id.module_id;
1213 	msg.dst_instance_id = dst_mcfg->id.pvt_id;
1214 	msg.bind = true;
1215 
1216 	ret = skl_ipc_bind_unbind(&skl->ipc, &msg);
1217 
1218 	if (!ret) {
1219 		src_mcfg->m_state = SKL_MODULE_BIND_DONE;
1220 		src_mcfg->m_out_pin[src_index].pin_state = SKL_PIN_BIND_DONE;
1221 		dst_mcfg->m_in_pin[dst_index].pin_state = SKL_PIN_BIND_DONE;
1222 		return ret;
1223 	}
1224 out:
	/* error case: if the IPC fails, clear the queue index */
1226 	skl_free_queue(src_mcfg->m_out_pin, src_index);
1227 	skl_free_queue(dst_mcfg->m_in_pin, dst_index);
1228 
1229 	return ret;
1230 }
1231 
static int skl_set_pipe_state(struct skl_dev *skl, struct skl_pipe *pipe,
1233 	enum skl_ipc_pipeline_state state)
1234 {
1235 	dev_dbg(skl->dev, "%s: pipe_state = %d\n", __func__, state);
1236 
1237 	return skl_ipc_set_pipeline_state(&skl->ipc, pipe->ppl_id, state);
1238 }
1239 
/*
 * A pipeline is a collection of modules. Before a module is instantiated, a
 * pipeline needs to be created for it.
 * This function creates the pipeline by sending the create pipeline IPC
 * message to the FW.
 */
int skl_create_pipeline(struct skl_dev *skl, struct skl_pipe *pipe)
1247 {
1248 	int ret;
1249 
1250 	dev_dbg(skl->dev, "%s: pipe_id = %d\n", __func__, pipe->ppl_id);
1251 
1252 	ret = skl_ipc_create_pipeline(&skl->ipc, pipe->memory_pages,
1253 				pipe->pipe_priority, pipe->ppl_id,
1254 				pipe->lp_mode);
1255 	if (ret < 0) {
1256 		dev_err(skl->dev, "Failed to create pipeline\n");
1257 		return ret;
1258 	}
1259 
1260 	pipe->state = SKL_PIPE_CREATED;
1261 
1262 	return 0;
1263 }
1264 
1265 /*
1266  * A pipeline needs to be deleted on cleanup. If a pipeline is running,
1267  * then pause it first. Before actual deletion, pipeline should enter
1268  * reset state. Finish the procedure by sending delete pipeline IPC.
1269  * DSP will stop the DMA engines and release resources
1270  */
int skl_delete_pipe(struct skl_dev *skl, struct skl_pipe *pipe)
1272 {
1273 	int ret;
1274 
1275 	dev_dbg(skl->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);
1276 
1277 	/* If pipe was not created in FW, do not try to delete it */
1278 	if (pipe->state < SKL_PIPE_CREATED)
1279 		return 0;
1280 
1281 	/* If pipe is started, do stop the pipe in FW. */
1282 	if (pipe->state >= SKL_PIPE_STARTED) {
1283 		ret = skl_set_pipe_state(skl, pipe, PPL_PAUSED);
1284 		if (ret < 0) {
1285 			dev_err(skl->dev, "Failed to stop pipeline\n");
1286 			return ret;
1287 		}
1288 
1289 		pipe->state = SKL_PIPE_PAUSED;
1290 	}
1291 
1292 	/* reset pipe state before deletion */
1293 	ret = skl_set_pipe_state(skl, pipe, PPL_RESET);
1294 	if (ret < 0) {
1295 		dev_err(skl->dev, "Failed to reset pipe ret=%d\n", ret);
1296 		return ret;
1297 	}
1298 
1299 	pipe->state = SKL_PIPE_RESET;
1300 
1301 	ret = skl_ipc_delete_pipeline(&skl->ipc, pipe->ppl_id);
1302 	if (ret < 0) {
1303 		dev_err(skl->dev, "Failed to delete pipeline\n");
1304 		return ret;
1305 	}
1306 
1307 	pipe->state = SKL_PIPE_INVALID;
1308 
1309 	return ret;
1310 }
1311 
/*
 * A pipeline is also a scheduling entity in the DSP which can be run or
 * stopped. For processing data the pipe needs to be run by sending an IPC
 * set pipe state message to the DSP.
 */
int skl_run_pipe(struct skl_dev *skl, struct skl_pipe *pipe)
1318 {
1319 	int ret;
1320 
1321 	dev_dbg(skl->dev, "%s: pipe = %d\n", __func__, pipe->ppl_id);
1322 
1323 	/* If pipe was not created in FW, do not try to pause or delete */
1324 	if (pipe->state < SKL_PIPE_CREATED)
1325 		return 0;
1326 
1327 	/* Pipe has to be paused before it is started */
1328 	ret = skl_set_pipe_state(skl, pipe, PPL_PAUSED);
1329 	if (ret < 0) {
1330 		dev_err(skl->dev, "Failed to pause pipe\n");
1331 		return ret;
1332 	}
1333 
1334 	pipe->state = SKL_PIPE_PAUSED;
1335 
1336 	ret = skl_set_pipe_state(skl, pipe, PPL_RUNNING);
1337 	if (ret < 0) {
1338 		dev_err(skl->dev, "Failed to start pipe\n");
1339 		return ret;
1340 	}
1341 
1342 	pipe->state = SKL_PIPE_STARTED;
1343 
1344 	return 0;
1345 }
1346 
/*
 * Stop the pipeline by sending a set pipe state IPC. The DSP doesn't
 * implement stop, so we always send the pause message.
 */
int skl_stop_pipe(struct skl_dev *skl, struct skl_pipe *pipe)
1352 {
1353 	int ret;
1354 
1355 	dev_dbg(skl->dev, "In %s pipe=%d\n", __func__, pipe->ppl_id);
1356 
1357 	/* If pipe was not created in FW, do not try to pause or delete */
1358 	if (pipe->state < SKL_PIPE_PAUSED)
1359 		return 0;
1360 
1361 	ret = skl_set_pipe_state(skl, pipe, PPL_PAUSED);
1362 	if (ret < 0) {
1363 		dev_dbg(skl->dev, "Failed to stop pipe\n");
1364 		return ret;
1365 	}
1366 
1367 	pipe->state = SKL_PIPE_PAUSED;
1368 
1369 	return 0;
1370 }
1371 
/*
 * Reset the pipeline by sending a set pipe state IPC; this will reset the
 * DMA from the DSP side.
 */
int skl_reset_pipe(struct skl_dev *skl, struct skl_pipe *pipe)
1377 {
1378 	int ret;
1379 
1380 	/* If pipe was not created in FW, do not try to pause or delete */
1381 	if (pipe->state < SKL_PIPE_PAUSED)
1382 		return 0;
1383 
1384 	ret = skl_set_pipe_state(skl, pipe, PPL_RESET);
1385 	if (ret < 0) {
1386 		dev_dbg(skl->dev, "Failed to reset pipe ret=%d\n", ret);
1387 		return ret;
1388 	}
1389 
1390 	pipe->state = SKL_PIPE_RESET;
1391 
1392 	return 0;
1393 }
1394 
1395 /* Algo parameter set helper function */
int skl_set_module_params(struct skl_dev *skl, u32 *params, int size,
1397 				u32 param_id, struct skl_module_cfg *mcfg)
1398 {
1399 	struct skl_ipc_large_config_msg msg;
1400 
1401 	msg.module_id = mcfg->id.module_id;
1402 	msg.instance_id = mcfg->id.pvt_id;
1403 	msg.param_data_size = size;
1404 	msg.large_param_id = param_id;
1405 
1406 	return skl_ipc_set_large_config(&skl->ipc, &msg, params);
1407 }
1408 
int skl_get_module_params(struct skl_dev *skl, u32 *params, int size,
1410 			  u32 param_id, struct skl_module_cfg *mcfg)
1411 {
1412 	struct skl_ipc_large_config_msg msg;
1413 	size_t bytes = size;
1414 
1415 	msg.module_id = mcfg->id.module_id;
1416 	msg.instance_id = mcfg->id.pvt_id;
1417 	msg.param_data_size = size;
1418 	msg.large_param_id = param_id;
1419 
1420 	return skl_ipc_get_large_config(&skl->ipc, &msg, &params, &bytes);
1421 }
1422