1 // SPDX-License-Identifier: GPL-2.0-only
2 //
3 // Copyright(c) 2021 Intel Corporation. All rights reserved.
4 //
5 // Authors: Cezary Rojewski <cezary.rojewski@intel.com>
6 // Amadeusz Slawinski <amadeuszx.slawinski@linux.intel.com>
7 //
8
9 #include <sound/intel-nhlt.h>
10 #include <sound/pcm_params.h>
11 #include <sound/soc.h>
12 #include "avs.h"
13 #include "path.h"
14 #include "topology.h"
15
16 /* Must be called with adev->comp_list_mutex held. */
17 static struct avs_tplg *
avs_path_find_tplg(struct avs_dev * adev,const char * name)18 avs_path_find_tplg(struct avs_dev *adev, const char *name)
19 {
20 struct avs_soc_component *acomp;
21
22 list_for_each_entry(acomp, &adev->comp_list, node)
23 if (!strcmp(acomp->tplg->name, name))
24 return acomp->tplg;
25 return NULL;
26 }
27
28 static struct avs_path_module *
avs_path_find_module(struct avs_path_pipeline * ppl,u32 template_id)29 avs_path_find_module(struct avs_path_pipeline *ppl, u32 template_id)
30 {
31 struct avs_path_module *mod;
32
33 list_for_each_entry(mod, &ppl->mod_list, node)
34 if (mod->template->id == template_id)
35 return mod;
36 return NULL;
37 }
38
39 static struct avs_path_pipeline *
avs_path_find_pipeline(struct avs_path * path,u32 template_id)40 avs_path_find_pipeline(struct avs_path *path, u32 template_id)
41 {
42 struct avs_path_pipeline *ppl;
43
44 list_for_each_entry(ppl, &path->ppl_list, node)
45 if (ppl->template->id == template_id)
46 return ppl;
47 return NULL;
48 }
49
50 static struct avs_path *
avs_path_find_path(struct avs_dev * adev,const char * name,u32 template_id)51 avs_path_find_path(struct avs_dev *adev, const char *name, u32 template_id)
52 {
53 struct avs_tplg_path_template *pos, *template = NULL;
54 struct avs_tplg *tplg;
55 struct avs_path *path;
56
57 tplg = avs_path_find_tplg(adev, name);
58 if (!tplg)
59 return NULL;
60
61 list_for_each_entry(pos, &tplg->path_tmpl_list, node) {
62 if (pos->id == template_id) {
63 template = pos;
64 break;
65 }
66 }
67 if (!template)
68 return NULL;
69
70 spin_lock(&adev->path_list_lock);
71 /* Only one variant of given path template may be instantiated at a time. */
72 list_for_each_entry(path, &adev->path_list, node) {
73 if (path->template->owner == template) {
74 spin_unlock(&adev->path_list_lock);
75 return path;
76 }
77 }
78
79 spin_unlock(&adev->path_list_lock);
80 return NULL;
81 }
82
avs_test_hw_params(struct snd_pcm_hw_params * params,struct avs_audio_format * fmt)83 static bool avs_test_hw_params(struct snd_pcm_hw_params *params,
84 struct avs_audio_format *fmt)
85 {
86 return (params_rate(params) == fmt->sampling_freq &&
87 params_channels(params) == fmt->num_channels &&
88 params_physical_width(params) == fmt->bit_depth &&
89 params_width(params) == fmt->valid_bit_depth);
90 }
91
92 static struct avs_tplg_path *
avs_path_find_variant(struct avs_dev * adev,struct avs_tplg_path_template * template,struct snd_pcm_hw_params * fe_params,struct snd_pcm_hw_params * be_params)93 avs_path_find_variant(struct avs_dev *adev,
94 struct avs_tplg_path_template *template,
95 struct snd_pcm_hw_params *fe_params,
96 struct snd_pcm_hw_params *be_params)
97 {
98 struct avs_tplg_path *variant;
99
100 list_for_each_entry(variant, &template->path_list, node) {
101 dev_dbg(adev->dev, "check FE rate %d chn %d vbd %d bd %d\n",
102 variant->fe_fmt->sampling_freq, variant->fe_fmt->num_channels,
103 variant->fe_fmt->valid_bit_depth, variant->fe_fmt->bit_depth);
104 dev_dbg(adev->dev, "check BE rate %d chn %d vbd %d bd %d\n",
105 variant->be_fmt->sampling_freq, variant->be_fmt->num_channels,
106 variant->be_fmt->valid_bit_depth, variant->be_fmt->bit_depth);
107
108 if (variant->fe_fmt && avs_test_hw_params(fe_params, variant->fe_fmt) &&
109 variant->be_fmt && avs_test_hw_params(be_params, variant->be_fmt))
110 return variant;
111 }
112
113 return NULL;
114 }
115
116 __maybe_unused
avs_dma_type_is_host(u32 dma_type)117 static bool avs_dma_type_is_host(u32 dma_type)
118 {
119 return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
120 dma_type == AVS_DMA_HDA_HOST_INPUT;
121 }
122
123 __maybe_unused
avs_dma_type_is_link(u32 dma_type)124 static bool avs_dma_type_is_link(u32 dma_type)
125 {
126 return !avs_dma_type_is_host(dma_type);
127 }
128
129 __maybe_unused
avs_dma_type_is_output(u32 dma_type)130 static bool avs_dma_type_is_output(u32 dma_type)
131 {
132 return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
133 dma_type == AVS_DMA_HDA_LINK_OUTPUT ||
134 dma_type == AVS_DMA_I2S_LINK_OUTPUT;
135 }
136
137 __maybe_unused
avs_dma_type_is_input(u32 dma_type)138 static bool avs_dma_type_is_input(u32 dma_type)
139 {
140 return !avs_dma_type_is_output(dma_type);
141 }
142
/*
 * Create a copier module instance on the DSP.
 *
 * The copier acts as a gateway module: its configuration embeds a
 * hardware-specific BLOB (fetched from the NHLT ACPI table for I2S and
 * DMIC links) plus a node_id selecting the DMA gateway and its virtual
 * index. Returns 0 on success or a negative error code.
 */
static int avs_copier_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct nhlt_acpi_table *nhlt = adev->nhlt;
	struct avs_tplg_module *t = mod->template;
	struct avs_copier_cfg *cfg;
	struct nhlt_specific_cfg *ep_blob;
	union avs_connector_node_id node_id = {0};
	size_t cfg_size, data_size = 0;
	void *data = NULL;
	u32 dma_type;
	int ret;

	dma_type = t->cfg_ext->copier.dma_type;
	node_id.dma_type = dma_type;

	switch (dma_type) {
	/* Scratch variables shared by the I2S and DMIC cases below. */
	struct avs_audio_format *fmt;
	int direction;

	case AVS_DMA_I2S_LINK_OUTPUT:
	case AVS_DMA_I2S_LINK_INPUT:
		if (avs_dma_type_is_input(dma_type))
			direction = SNDRV_PCM_STREAM_CAPTURE;
		else
			direction = SNDRV_PCM_STREAM_PLAYBACK;

		/*
		 * Topology may pin an explicit format for blob lookup;
		 * otherwise derive it from the stream direction.
		 */
		if (t->cfg_ext->copier.blob_fmt)
			fmt = t->cfg_ext->copier.blob_fmt;
		else if (direction == SNDRV_PCM_STREAM_CAPTURE)
			fmt = t->in_fmt;
		else
			fmt = t->cfg_ext->copier.out_fmt;

		ep_blob = intel_nhlt_get_endpoint_blob(adev->dev,
				nhlt, t->cfg_ext->copier.vindex.i2s.instance,
				NHLT_LINK_SSP, fmt->valid_bit_depth, fmt->bit_depth,
				fmt->num_channels, fmt->sampling_freq, direction,
				NHLT_DEVICE_I2S);
		if (!ep_blob) {
			dev_err(adev->dev, "no I2S ep_blob found\n");
			return -ENOENT;
		}

		data = ep_blob->caps;
		data_size = ep_blob->size;
		/* I2S gateway's vindex is statically assigned in topology */
		node_id.vindex = t->cfg_ext->copier.vindex.val;

		break;

	case AVS_DMA_DMIC_LINK_INPUT:
		/* DMIC is capture-only. */
		direction = SNDRV_PCM_STREAM_CAPTURE;

		if (t->cfg_ext->copier.blob_fmt)
			fmt = t->cfg_ext->copier.blob_fmt;
		else
			fmt = t->in_fmt;

		ep_blob = intel_nhlt_get_endpoint_blob(adev->dev, nhlt, 0,
				NHLT_LINK_DMIC, fmt->valid_bit_depth,
				fmt->bit_depth, fmt->num_channels,
				fmt->sampling_freq, direction, NHLT_DEVICE_DMIC);
		if (!ep_blob) {
			dev_err(adev->dev, "no DMIC ep_blob found\n");
			return -ENOENT;
		}

		data = ep_blob->caps;
		data_size = ep_blob->size;
		/* DMIC gateway's vindex is statically assigned in topology */
		node_id.vindex = t->cfg_ext->copier.vindex.val;

		break;

	case AVS_DMA_HDA_HOST_OUTPUT:
	case AVS_DMA_HDA_HOST_INPUT:
		/* HOST gateway's vindex is dynamically assigned with DMA id */
		node_id.vindex = mod->owner->owner->dma_id;
		break;

	case AVS_DMA_HDA_LINK_OUTPUT:
	case AVS_DMA_HDA_LINK_INPUT:
		/* Link gateways combine the static vindex with the DMA id. */
		node_id.vindex = t->cfg_ext->copier.vindex.val |
				 mod->owner->owner->dma_id;
		break;

	case INVALID_OBJECT_ID:
	default:
		/* No gateway attached to this copier. */
		node_id = INVALID_NODE_ID;
		break;
	}

	cfg_size = sizeof(*cfg) + data_size;
	/* Every config-BLOB contains gateway attributes. */
	if (data_size)
		cfg_size -= sizeof(cfg->gtw_cfg.config.attrs);

	cfg = kzalloc(cfg_size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->out_fmt = *t->cfg_ext->copier.out_fmt;
	cfg->feature_mask = t->cfg_ext->copier.feature_mask;
	cfg->gtw_cfg.node_id = node_id;
	cfg->gtw_cfg.dma_buffer_size = t->cfg_ext->copier.dma_buffer_size;
	/* config_length in DWORDs */
	cfg->gtw_cfg.config_length = DIV_ROUND_UP(data_size, 4);
	if (data)
		memcpy(&cfg->gtw_cfg.config, data, data_size);

	/* Keep a copy of the gateway attributes for later use. */
	mod->gtw_attrs = cfg->gtw_cfg.config.attrs;

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				  t->core_id, t->domain, cfg, cfg_size,
				  &mod->instance_id);
	kfree(cfg);
	return ret;
}
266
avs_updown_mix_create(struct avs_dev * adev,struct avs_path_module * mod)267 static int avs_updown_mix_create(struct avs_dev *adev, struct avs_path_module *mod)
268 {
269 struct avs_tplg_module *t = mod->template;
270 struct avs_updown_mixer_cfg cfg;
271 int i;
272
273 cfg.base.cpc = t->cfg_base->cpc;
274 cfg.base.ibs = t->cfg_base->ibs;
275 cfg.base.obs = t->cfg_base->obs;
276 cfg.base.is_pages = t->cfg_base->is_pages;
277 cfg.base.audio_fmt = *t->in_fmt;
278 cfg.out_channel_config = t->cfg_ext->updown_mix.out_channel_config;
279 cfg.coefficients_select = t->cfg_ext->updown_mix.coefficients_select;
280 for (i = 0; i < AVS_CHANNELS_MAX; i++)
281 cfg.coefficients[i] = t->cfg_ext->updown_mix.coefficients[i];
282 cfg.channel_map = t->cfg_ext->updown_mix.channel_map;
283
284 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
285 t->core_id, t->domain, &cfg, sizeof(cfg),
286 &mod->instance_id);
287 }
288
avs_src_create(struct avs_dev * adev,struct avs_path_module * mod)289 static int avs_src_create(struct avs_dev *adev, struct avs_path_module *mod)
290 {
291 struct avs_tplg_module *t = mod->template;
292 struct avs_src_cfg cfg;
293
294 cfg.base.cpc = t->cfg_base->cpc;
295 cfg.base.ibs = t->cfg_base->ibs;
296 cfg.base.obs = t->cfg_base->obs;
297 cfg.base.is_pages = t->cfg_base->is_pages;
298 cfg.base.audio_fmt = *t->in_fmt;
299 cfg.out_freq = t->cfg_ext->src.out_freq;
300
301 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
302 t->core_id, t->domain, &cfg, sizeof(cfg),
303 &mod->instance_id);
304 }
305
avs_asrc_create(struct avs_dev * adev,struct avs_path_module * mod)306 static int avs_asrc_create(struct avs_dev *adev, struct avs_path_module *mod)
307 {
308 struct avs_tplg_module *t = mod->template;
309 struct avs_asrc_cfg cfg;
310
311 cfg.base.cpc = t->cfg_base->cpc;
312 cfg.base.ibs = t->cfg_base->ibs;
313 cfg.base.obs = t->cfg_base->obs;
314 cfg.base.is_pages = t->cfg_base->is_pages;
315 cfg.base.audio_fmt = *t->in_fmt;
316 cfg.out_freq = t->cfg_ext->asrc.out_freq;
317 cfg.mode = t->cfg_ext->asrc.mode;
318 cfg.disable_jitter_buffer = t->cfg_ext->asrc.disable_jitter_buffer;
319
320 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
321 t->core_id, t->domain, &cfg, sizeof(cfg),
322 &mod->instance_id);
323 }
324
avs_aec_create(struct avs_dev * adev,struct avs_path_module * mod)325 static int avs_aec_create(struct avs_dev *adev, struct avs_path_module *mod)
326 {
327 struct avs_tplg_module *t = mod->template;
328 struct avs_aec_cfg cfg;
329
330 cfg.base.cpc = t->cfg_base->cpc;
331 cfg.base.ibs = t->cfg_base->ibs;
332 cfg.base.obs = t->cfg_base->obs;
333 cfg.base.is_pages = t->cfg_base->is_pages;
334 cfg.base.audio_fmt = *t->in_fmt;
335 cfg.ref_fmt = *t->cfg_ext->aec.ref_fmt;
336 cfg.out_fmt = *t->cfg_ext->aec.out_fmt;
337 cfg.cpc_lp_mode = t->cfg_ext->aec.cpc_lp_mode;
338
339 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
340 t->core_id, t->domain, &cfg, sizeof(cfg),
341 &mod->instance_id);
342 }
343
avs_mux_create(struct avs_dev * adev,struct avs_path_module * mod)344 static int avs_mux_create(struct avs_dev *adev, struct avs_path_module *mod)
345 {
346 struct avs_tplg_module *t = mod->template;
347 struct avs_mux_cfg cfg;
348
349 cfg.base.cpc = t->cfg_base->cpc;
350 cfg.base.ibs = t->cfg_base->ibs;
351 cfg.base.obs = t->cfg_base->obs;
352 cfg.base.is_pages = t->cfg_base->is_pages;
353 cfg.base.audio_fmt = *t->in_fmt;
354 cfg.ref_fmt = *t->cfg_ext->mux.ref_fmt;
355 cfg.out_fmt = *t->cfg_ext->mux.out_fmt;
356
357 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
358 t->core_id, t->domain, &cfg, sizeof(cfg),
359 &mod->instance_id);
360 }
361
avs_wov_create(struct avs_dev * adev,struct avs_path_module * mod)362 static int avs_wov_create(struct avs_dev *adev, struct avs_path_module *mod)
363 {
364 struct avs_tplg_module *t = mod->template;
365 struct avs_wov_cfg cfg;
366
367 cfg.base.cpc = t->cfg_base->cpc;
368 cfg.base.ibs = t->cfg_base->ibs;
369 cfg.base.obs = t->cfg_base->obs;
370 cfg.base.is_pages = t->cfg_base->is_pages;
371 cfg.base.audio_fmt = *t->in_fmt;
372 cfg.cpc_lp_mode = t->cfg_ext->wov.cpc_lp_mode;
373
374 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
375 t->core_id, t->domain, &cfg, sizeof(cfg),
376 &mod->instance_id);
377 }
378
avs_micsel_create(struct avs_dev * adev,struct avs_path_module * mod)379 static int avs_micsel_create(struct avs_dev *adev, struct avs_path_module *mod)
380 {
381 struct avs_tplg_module *t = mod->template;
382 struct avs_micsel_cfg cfg;
383
384 cfg.base.cpc = t->cfg_base->cpc;
385 cfg.base.ibs = t->cfg_base->ibs;
386 cfg.base.obs = t->cfg_base->obs;
387 cfg.base.is_pages = t->cfg_base->is_pages;
388 cfg.base.audio_fmt = *t->in_fmt;
389 cfg.out_fmt = *t->cfg_ext->micsel.out_fmt;
390
391 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
392 t->core_id, t->domain, &cfg, sizeof(cfg),
393 &mod->instance_id);
394 }
395
avs_modbase_create(struct avs_dev * adev,struct avs_path_module * mod)396 static int avs_modbase_create(struct avs_dev *adev, struct avs_path_module *mod)
397 {
398 struct avs_tplg_module *t = mod->template;
399 struct avs_modcfg_base cfg;
400
401 cfg.cpc = t->cfg_base->cpc;
402 cfg.ibs = t->cfg_base->ibs;
403 cfg.obs = t->cfg_base->obs;
404 cfg.is_pages = t->cfg_base->is_pages;
405 cfg.audio_fmt = *t->in_fmt;
406
407 return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
408 t->core_id, t->domain, &cfg, sizeof(cfg),
409 &mod->instance_id);
410 }
411
/*
 * Create a module of a type without a dedicated handler, using the generic
 * extended config: base parameters plus an explicit format for every pin.
 */
static int avs_modext_create(struct avs_dev *adev, struct avs_path_module *mod)
{
	struct avs_tplg_module *t = mod->template;
	struct avs_tplg_modcfg_ext *tcfg = t->cfg_ext;
	struct avs_modcfg_ext *cfg;
	size_t cfg_size, num_pins;
	int ret, i;

	num_pins = tcfg->generic.num_input_pins + tcfg->generic.num_output_pins;
	/* Trailing array carries one pin-format entry per pin. */
	cfg_size = sizeof(*cfg) + sizeof(*cfg->pin_fmts) * num_pins;

	cfg = kzalloc(cfg_size, GFP_KERNEL);
	if (!cfg)
		return -ENOMEM;

	cfg->base.cpc = t->cfg_base->cpc;
	cfg->base.ibs = t->cfg_base->ibs;
	cfg->base.obs = t->cfg_base->obs;
	cfg->base.is_pages = t->cfg_base->is_pages;
	cfg->base.audio_fmt = *t->in_fmt;
	cfg->num_input_pins = tcfg->generic.num_input_pins;
	cfg->num_output_pins = tcfg->generic.num_output_pins;

	/* configure pin formats */
	for (i = 0; i < num_pins; i++) {
		struct avs_tplg_pin_format *tpin = &tcfg->generic.pin_fmts[i];
		struct avs_pin_format *pin = &cfg->pin_fmts[i];

		pin->pin_index = tpin->pin_index;
		pin->iobs = tpin->iobs;
		pin->audio_fmt = *tpin->fmt;
	}

	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
				  t->core_id, t->domain, cfg, cfg_size,
				  &mod->instance_id);
	kfree(cfg);
	return ret;
}
451
avs_probe_create(struct avs_dev * adev,struct avs_path_module * mod)452 static int avs_probe_create(struct avs_dev *adev, struct avs_path_module *mod)
453 {
454 dev_err(adev->dev, "Probe module can't be instantiated by topology");
455 return -EINVAL;
456 }
457
/* Maps a module-type UUID to its config-builder/creation handler. */
struct avs_module_create {
	guid_t *guid;
	int (*create)(struct avs_dev *adev, struct avs_path_module *mod);
};

/* Built-in module types; any other UUID falls back to avs_modext_create(). */
static struct avs_module_create avs_module_create[] = {
	{ &AVS_MIXIN_MOD_UUID, avs_modbase_create },
	{ &AVS_MIXOUT_MOD_UUID, avs_modbase_create },
	{ &AVS_KPBUFF_MOD_UUID, avs_modbase_create },
	{ &AVS_COPIER_MOD_UUID, avs_copier_create },
	{ &AVS_MICSEL_MOD_UUID, avs_micsel_create },
	{ &AVS_MUX_MOD_UUID, avs_mux_create },
	{ &AVS_UPDWMIX_MOD_UUID, avs_updown_mix_create },
	{ &AVS_SRCINTC_MOD_UUID, avs_src_create },
	{ &AVS_AEC_MOD_UUID, avs_aec_create },
	{ &AVS_ASRC_MOD_UUID, avs_asrc_create },
	{ &AVS_INTELWOV_MOD_UUID, avs_wov_create },
	{ &AVS_PROBE_MOD_UUID, avs_probe_create },
};
477
avs_path_module_type_create(struct avs_dev * adev,struct avs_path_module * mod)478 static int avs_path_module_type_create(struct avs_dev *adev, struct avs_path_module *mod)
479 {
480 const guid_t *type = &mod->template->cfg_ext->type;
481
482 for (int i = 0; i < ARRAY_SIZE(avs_module_create); i++)
483 if (guid_equal(type, avs_module_create[i].guid))
484 return avs_module_create[i].create(adev, mod);
485
486 return avs_modext_create(adev, mod);
487 }
488
/* Release host-side resources of a path module (DSP side freed by caller). */
static void avs_path_module_free(struct avs_dev *adev, struct avs_path_module *mod)
{
	kfree(mod);
}
493
494 static struct avs_path_module *
avs_path_module_create(struct avs_dev * adev,struct avs_path_pipeline * owner,struct avs_tplg_module * template)495 avs_path_module_create(struct avs_dev *adev,
496 struct avs_path_pipeline *owner,
497 struct avs_tplg_module *template)
498 {
499 struct avs_path_module *mod;
500 int module_id, ret;
501
502 module_id = avs_get_module_id(adev, &template->cfg_ext->type);
503 if (module_id < 0)
504 return ERR_PTR(module_id);
505
506 mod = kzalloc(sizeof(*mod), GFP_KERNEL);
507 if (!mod)
508 return ERR_PTR(-ENOMEM);
509
510 mod->template = template;
511 mod->module_id = module_id;
512 mod->owner = owner;
513 INIT_LIST_HEAD(&mod->node);
514
515 ret = avs_path_module_type_create(adev, mod);
516 if (ret) {
517 dev_err(adev->dev, "module-type create failed: %d\n", ret);
518 kfree(mod);
519 return ERR_PTR(ret);
520 }
521
522 return mod;
523 }
524
/*
 * Resolve a binding's source and sink module endpoints.
 *
 * Looks up the local module within the owning pipeline and the remote
 * module through topology-name -> path -> pipeline -> module, then fills
 * in source/sink pointers and pin numbers according to the binding's
 * direction (t->is_sink). Returns 0 on success, -EINVAL when any lookup
 * fails.
 */
static int avs_path_binding_arm(struct avs_dev *adev, struct avs_path_binding *binding)
{
	struct avs_path_module *this_mod, *target_mod;
	struct avs_path_pipeline *target_ppl;
	struct avs_path *target_path;
	struct avs_tplg_binding *t;

	t = binding->template;
	this_mod = avs_path_find_module(binding->owner,
					t->mod_id);
	if (!this_mod) {
		dev_err(adev->dev, "path mod %d not found\n", t->mod_id);
		return -EINVAL;
	}

	/* update with target_tplg_name too */
	target_path = avs_path_find_path(adev, t->target_tplg_name,
					 t->target_path_tmpl_id);
	if (!target_path) {
		dev_err(adev->dev, "target path %s:%d not found\n",
			t->target_tplg_name, t->target_path_tmpl_id);
		return -EINVAL;
	}

	target_ppl = avs_path_find_pipeline(target_path,
					    t->target_ppl_id);
	if (!target_ppl) {
		dev_err(adev->dev, "target ppl %d not found\n", t->target_ppl_id);
		return -EINVAL;
	}

	target_mod = avs_path_find_module(target_ppl, t->target_mod_id);
	if (!target_mod) {
		dev_err(adev->dev, "target mod %d not found\n", t->target_mod_id);
		return -EINVAL;
	}

	/* Orient the connection: is the local module the sink or the source? */
	if (t->is_sink) {
		binding->sink = this_mod;
		binding->sink_pin = t->mod_pin;
		binding->source = target_mod;
		binding->source_pin = t->target_mod_pin;
	} else {
		binding->sink = target_mod;
		binding->sink_pin = t->target_mod_pin;
		binding->source = this_mod;
		binding->source_pin = t->mod_pin;
	}

	return 0;
}
576
/* Release host-side resources of a binding descriptor. */
static void avs_path_binding_free(struct avs_dev *adev, struct avs_path_binding *binding)
{
	kfree(binding);
}
581
avs_path_binding_create(struct avs_dev * adev,struct avs_path_pipeline * owner,struct avs_tplg_binding * t)582 static struct avs_path_binding *avs_path_binding_create(struct avs_dev *adev,
583 struct avs_path_pipeline *owner,
584 struct avs_tplg_binding *t)
585 {
586 struct avs_path_binding *binding;
587
588 binding = kzalloc(sizeof(*binding), GFP_KERNEL);
589 if (!binding)
590 return ERR_PTR(-ENOMEM);
591
592 binding->template = t;
593 binding->owner = owner;
594 INIT_LIST_HEAD(&binding->node);
595
596 return binding;
597 }
598
/*
 * Bind each module in the pipeline to the next one on the list (pin 0 to
 * pin 0), forming the in-pipeline processing chain. Cross-pipeline
 * connections are handled separately via bindings.
 */
static int avs_path_pipeline_arm(struct avs_dev *adev,
				 struct avs_path_pipeline *ppl)
{
	struct avs_path_module *mod;

	list_for_each_entry(mod, &ppl->mod_list, node) {
		struct avs_path_module *source, *sink;
		int ret;

		/*
		 * Only one module (so it's implicitly last) or it is the last
		 * one, either way we don't have next module to bind it to.
		 */
		if (mod == list_last_entry(&ppl->mod_list,
					   struct avs_path_module, node))
			break;

		/* bind current module to next module on list */
		source = mod;
		sink = list_next_entry(mod, node);
		if (!source || !sink)
			return -EINVAL;

		ret = avs_ipc_bind(adev, source->module_id, source->instance_id,
				   sink->module_id, sink->instance_id, 0, 0);
		if (ret)
			return AVS_IPC_RET(ret);
	}

	return 0;
}
630
/*
 * Tear down a pipeline: free bindings, delete the DSP-side pipeline,
 * delete and free all owned modules, then unlink and free the host
 * descriptor - strictly in that order.
 */
static void avs_path_pipeline_free(struct avs_dev *adev,
				   struct avs_path_pipeline *ppl)
{
	struct avs_path_binding *binding, *bsave;
	struct avs_path_module *mod, *save;

	list_for_each_entry_safe(binding, bsave, &ppl->binding_list, node) {
		list_del(&binding->node);
		avs_path_binding_free(adev, binding);
	}

	avs_dsp_delete_pipeline(adev, ppl->instance_id);

	/* Unload resources occupied by owned modules */
	list_for_each_entry_safe(mod, save, &ppl->mod_list, node) {
		avs_dsp_delete_module(adev, mod->module_id, mod->instance_id,
				      mod->owner->instance_id,
				      mod->template->core_id);
		avs_path_module_free(adev, mod);
	}

	list_del(&ppl->node);
	kfree(ppl);
}
655
/*
 * Instantiate a pipeline from its topology template: create the DSP-side
 * pipeline, then all member modules, then all binding descriptors. On any
 * failure after the DSP pipeline exists, avs_path_pipeline_free() unwinds
 * everything created so far. Returns the pipeline or ERR_PTR.
 */
static struct avs_path_pipeline *
avs_path_pipeline_create(struct avs_dev *adev, struct avs_path *owner,
			 struct avs_tplg_pipeline *template)
{
	struct avs_path_pipeline *ppl;
	struct avs_tplg_pplcfg *cfg = template->cfg;
	struct avs_tplg_module *tmod;
	int ret, i;

	ppl = kzalloc(sizeof(*ppl), GFP_KERNEL);
	if (!ppl)
		return ERR_PTR(-ENOMEM);

	ppl->template = template;
	ppl->owner = owner;
	INIT_LIST_HEAD(&ppl->binding_list);
	INIT_LIST_HEAD(&ppl->mod_list);
	INIT_LIST_HEAD(&ppl->node);

	ret = avs_dsp_create_pipeline(adev, cfg->req_size, cfg->priority,
				      cfg->lp, cfg->attributes,
				      &ppl->instance_id);
	if (ret) {
		dev_err(adev->dev, "error creating pipeline %d\n", ret);
		kfree(ppl);
		return ERR_PTR(ret);
	}

	list_for_each_entry(tmod, &template->mod_list, node) {
		struct avs_path_module *mod;

		mod = avs_path_module_create(adev, ppl, tmod);
		if (IS_ERR(mod)) {
			ret = PTR_ERR(mod);
			dev_err(adev->dev, "error creating module %d\n", ret);
			goto init_err;
		}

		list_add_tail(&mod->node, &ppl->mod_list);
	}

	for (i = 0; i < template->num_bindings; i++) {
		struct avs_path_binding *binding;

		binding = avs_path_binding_create(adev, ppl, template->bindings[i]);
		if (IS_ERR(binding)) {
			ret = PTR_ERR(binding);
			dev_err(adev->dev, "error creating binding %d\n", ret);
			goto init_err;
		}

		list_add_tail(&binding->node, &ppl->binding_list);
	}

	return ppl;

init_err:
	avs_path_pipeline_free(adev, ppl);
	return ERR_PTR(ret);
}
716
/*
 * Initialize @path from @template: create every pipeline listed by the
 * template and publish the path on adev->path_list. On pipeline-creation
 * failure the error is returned and the caller is responsible for freeing
 * the partially constructed path.
 */
static int avs_path_init(struct avs_dev *adev, struct avs_path *path,
			 struct avs_tplg_path *template, u32 dma_id)
{
	struct avs_tplg_pipeline *tppl;

	path->owner = adev;
	path->template = template;
	path->dma_id = dma_id;
	INIT_LIST_HEAD(&path->ppl_list);
	INIT_LIST_HEAD(&path->node);

	/* create all the pipelines */
	list_for_each_entry(tppl, &template->ppl_list, node) {
		struct avs_path_pipeline *ppl;

		ppl = avs_path_pipeline_create(adev, path, tppl);
		if (IS_ERR(ppl))
			return PTR_ERR(ppl);

		list_add_tail(&ppl->node, &path->ppl_list);
	}

	/* Make the path visible to avs_path_find_path(). */
	spin_lock(&adev->path_list_lock);
	list_add_tail(&path->node, &adev->path_list);
	spin_unlock(&adev->path_list_lock);

	return 0;
}
745
avs_path_arm(struct avs_dev * adev,struct avs_path * path)746 static int avs_path_arm(struct avs_dev *adev, struct avs_path *path)
747 {
748 struct avs_path_pipeline *ppl;
749 struct avs_path_binding *binding;
750 int ret;
751
752 list_for_each_entry(ppl, &path->ppl_list, node) {
753 /*
754 * Arm all ppl bindings before binding internal modules
755 * as it costs no IPCs which isn't true for the latter.
756 */
757 list_for_each_entry(binding, &ppl->binding_list, node) {
758 ret = avs_path_binding_arm(adev, binding);
759 if (ret < 0)
760 return ret;
761 }
762
763 ret = avs_path_pipeline_arm(adev, ppl);
764 if (ret < 0)
765 return ret;
766 }
767
768 return 0;
769 }
770
/*
 * Unpublish the path from adev->path_list, tear down every pipeline and
 * free the descriptor. Caller holds adev->path_mutex (or is on an error
 * path before the path became reachable by others).
 */
static void avs_path_free_unlocked(struct avs_path *path)
{
	struct avs_path_pipeline *ppl, *save;

	spin_lock(&path->owner->path_list_lock);
	list_del(&path->node);
	spin_unlock(&path->owner->path_list_lock);

	list_for_each_entry_safe(ppl, save, &path->ppl_list, node)
		avs_path_pipeline_free(path->owner, ppl);

	kfree(path);
}
784
/*
 * Allocate, initialize and arm a path built from @template. Called with
 * path_mutex and comp_list_mutex held by avs_path_create(). On failure
 * everything created so far is unwound. Returns the path or ERR_PTR.
 */
static struct avs_path *avs_path_create_unlocked(struct avs_dev *adev, u32 dma_id,
						 struct avs_tplg_path *template)
{
	struct avs_path *path;
	int ret;

	path = kzalloc(sizeof(*path), GFP_KERNEL);
	if (!path)
		return ERR_PTR(-ENOMEM);

	ret = avs_path_init(adev, path, template, dma_id);
	if (ret < 0)
		goto err;

	ret = avs_path_arm(adev, path);
	if (ret < 0)
		goto err;

	/* State transitions (reset/pause/run) are driven by PCM callbacks. */
	path->state = AVS_PPL_STATE_INVALID;
	return path;
err:
	avs_path_free_unlocked(path);
	return ERR_PTR(ret);
}
809
/* Tear down a path; serialized against creation by adev->path_mutex. */
void avs_path_free(struct avs_path *path)
{
	struct avs_dev *adev = path->owner;

	mutex_lock(&adev->path_mutex);
	avs_path_free_unlocked(path);
	mutex_unlock(&adev->path_mutex);
}
818
/*
 * Public entry point: pick the template variant matching the FE/BE
 * hw_params and instantiate it. Lock order is path_mutex then
 * comp_list_mutex. Returns the created path or ERR_PTR.
 */
struct avs_path *avs_path_create(struct avs_dev *adev, u32 dma_id,
				 struct avs_tplg_path_template *template,
				 struct snd_pcm_hw_params *fe_params,
				 struct snd_pcm_hw_params *be_params)
{
	struct avs_tplg_path *variant;
	struct avs_path *path;

	variant = avs_path_find_variant(adev, template, fe_params, be_params);
	if (!variant) {
		dev_err(adev->dev, "no matching variant found\n");
		return ERR_PTR(-ENOENT);
	}

	/* Serialize path and its components creation. */
	mutex_lock(&adev->path_mutex);
	/* Satisfy needs of avs_path_find_tplg(). */
	mutex_lock(&adev->comp_list_mutex);

	path = avs_path_create_unlocked(adev, dma_id, variant);

	mutex_unlock(&adev->comp_list_mutex);
	mutex_unlock(&adev->path_mutex);

	return path;
}
845
/*
 * Pre-bind fixup: copier modules bound on a non-zero output pin must be
 * told the sink's format before the actual BIND IPC. All other cases are
 * a no-op. Returns 0 on success or a negative error code.
 */
static int avs_path_bind_prepare(struct avs_dev *adev,
				 struct avs_path_binding *binding)
{
	const struct avs_audio_format *src_fmt, *sink_fmt;
	struct avs_tplg_module *tsource = binding->source->template;
	struct avs_path_module *source = binding->source;
	int ret;

	/*
	 * only copier modules about to be bound
	 * to output pin other than 0 need preparation
	 */
	if (!binding->source_pin)
		return 0;
	if (!guid_equal(&tsource->cfg_ext->type, &AVS_COPIER_MOD_UUID))
		return 0;

	src_fmt = tsource->in_fmt;
	sink_fmt = binding->sink->template->in_fmt;

	ret = avs_ipc_copier_set_sink_format(adev, source->module_id,
					     source->instance_id, binding->source_pin,
					     src_fmt, sink_fmt);
	if (ret) {
		dev_err(adev->dev, "config copier failed: %d\n", ret);
		return AVS_IPC_RET(ret);
	}

	return 0;
}
876
/*
 * Issue BIND IPCs for every armed binding of every pipeline in the path,
 * preparing copier sink formats first where needed. Returns 0 or a
 * negative error code from the first failing step.
 */
int avs_path_bind(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		struct avs_path_binding *binding;

		list_for_each_entry(binding, &ppl->binding_list, node) {
			struct avs_path_module *source, *sink;

			source = binding->source;
			sink = binding->sink;

			ret = avs_path_bind_prepare(adev, binding);
			if (ret < 0)
				return ret;

			ret = avs_ipc_bind(adev, source->module_id,
					   source->instance_id, sink->module_id,
					   sink->instance_id, binding->sink_pin,
					   binding->source_pin);
			if (ret) {
				dev_err(adev->dev, "bind path failed: %d\n", ret);
				return AVS_IPC_RET(ret);
			}
		}
	}

	return 0;
}
909
/*
 * Issue UNBIND IPCs for every binding of every pipeline in the path -
 * the inverse of avs_path_bind(). Returns 0 or a negative error code
 * from the first failing step.
 */
int avs_path_unbind(struct avs_path *path)
{
	struct avs_path_pipeline *ppl;
	struct avs_dev *adev = path->owner;
	int ret;

	list_for_each_entry(ppl, &path->ppl_list, node) {
		struct avs_path_binding *binding;

		list_for_each_entry(binding, &ppl->binding_list, node) {
			struct avs_path_module *source, *sink;

			source = binding->source;
			sink = binding->sink;

			ret = avs_ipc_unbind(adev, source->module_id,
					     source->instance_id, sink->module_id,
					     sink->instance_id, binding->sink_pin,
					     binding->source_pin);
			if (ret) {
				dev_err(adev->dev, "unbind path failed: %d\n", ret);
				return AVS_IPC_RET(ret);
			}
		}
	}

	return 0;
}
938
avs_path_reset(struct avs_path * path)939 int avs_path_reset(struct avs_path *path)
940 {
941 struct avs_path_pipeline *ppl;
942 struct avs_dev *adev = path->owner;
943 int ret;
944
945 if (path->state == AVS_PPL_STATE_RESET)
946 return 0;
947
948 list_for_each_entry(ppl, &path->ppl_list, node) {
949 ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
950 AVS_PPL_STATE_RESET);
951 if (ret) {
952 dev_err(adev->dev, "reset path failed: %d\n", ret);
953 path->state = AVS_PPL_STATE_INVALID;
954 return AVS_IPC_RET(ret);
955 }
956 }
957
958 path->state = AVS_PPL_STATE_RESET;
959 return 0;
960 }
961
avs_path_pause(struct avs_path * path)962 int avs_path_pause(struct avs_path *path)
963 {
964 struct avs_path_pipeline *ppl;
965 struct avs_dev *adev = path->owner;
966 int ret;
967
968 if (path->state == AVS_PPL_STATE_PAUSED)
969 return 0;
970
971 list_for_each_entry_reverse(ppl, &path->ppl_list, node) {
972 ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
973 AVS_PPL_STATE_PAUSED);
974 if (ret) {
975 dev_err(adev->dev, "pause path failed: %d\n", ret);
976 path->state = AVS_PPL_STATE_INVALID;
977 return AVS_IPC_RET(ret);
978 }
979 }
980
981 path->state = AVS_PPL_STATE_PAUSED;
982 return 0;
983 }
984
avs_path_run(struct avs_path * path,int trigger)985 int avs_path_run(struct avs_path *path, int trigger)
986 {
987 struct avs_path_pipeline *ppl;
988 struct avs_dev *adev = path->owner;
989 int ret;
990
991 if (path->state == AVS_PPL_STATE_RUNNING && trigger == AVS_TPLG_TRIGGER_AUTO)
992 return 0;
993
994 list_for_each_entry(ppl, &path->ppl_list, node) {
995 if (ppl->template->cfg->trigger != trigger)
996 continue;
997
998 ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
999 AVS_PPL_STATE_RUNNING);
1000 if (ret) {
1001 dev_err(adev->dev, "run path failed: %d\n", ret);
1002 path->state = AVS_PPL_STATE_INVALID;
1003 return AVS_IPC_RET(ret);
1004 }
1005 }
1006
1007 path->state = AVS_PPL_STATE_RUNNING;
1008 return 0;
1009 }
1010