xref: /linux/sound/soc/intel/avs/path.c (revision 33e02dc69afbd8f1b85a51d74d72f139ba4ca623)
1 // SPDX-License-Identifier: GPL-2.0-only
2 //
3 // Copyright(c) 2021 Intel Corporation
4 //
5 // Authors: Cezary Rojewski <cezary.rojewski@intel.com>
6 //          Amadeusz Slawinski <amadeuszx.slawinski@linux.intel.com>
7 //
8 
9 #include <linux/acpi.h>
10 #include <acpi/nhlt.h>
11 #include <sound/pcm_params.h>
12 #include <sound/soc.h>
13 #include "avs.h"
14 #include "control.h"
15 #include "path.h"
16 #include "topology.h"
17 
18 /* Must be called with adev->comp_list_mutex held. */
19 static struct avs_tplg *
20 avs_path_find_tplg(struct avs_dev *adev, const char *name)
21 {
22 	struct avs_soc_component *acomp;
23 
24 	list_for_each_entry(acomp, &adev->comp_list, node)
25 		if (!strcmp(acomp->tplg->name, name))
26 			return acomp->tplg;
27 	return NULL;
28 }
29 
30 static struct avs_path_module *
31 avs_path_find_module(struct avs_path_pipeline *ppl, u32 template_id)
32 {
33 	struct avs_path_module *mod;
34 
35 	list_for_each_entry(mod, &ppl->mod_list, node)
36 		if (mod->template->id == template_id)
37 			return mod;
38 	return NULL;
39 }
40 
41 static struct avs_path_pipeline *
42 avs_path_find_pipeline(struct avs_path *path, u32 template_id)
43 {
44 	struct avs_path_pipeline *ppl;
45 
46 	list_for_each_entry(ppl, &path->ppl_list, node)
47 		if (ppl->template->id == template_id)
48 			return ppl;
49 	return NULL;
50 }
51 
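/*
 * Find an already-instantiated path built from any variant of the path
 * template @template_id defined in topology @name. Relies on
 * avs_path_find_tplg(), so adev->comp_list_mutex must be held.
 */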
52 static struct avs_path *
53 avs_path_find_path(struct avs_dev *adev, const char *name, u32 template_id)
54 {
55 	struct avs_tplg_path_template *pos, *template = NULL;
56 	struct avs_tplg *tplg;
57 	struct avs_path *path;
58 
59 	tplg = avs_path_find_tplg(adev, name);
60 	if (!tplg)
61 		return NULL;
62 
63 	list_for_each_entry(pos, &tplg->path_tmpl_list, node) {
64 		if (pos->id == template_id) {
65 			template = pos;
66 			break;
67 		}
68 	}
69 	if (!template)
70 		return NULL;
71 
72 	spin_lock(&adev->path_list_lock);
73 	/* Only one variant of a given path template may be instantiated at a time. */
74 	list_for_each_entry(path, &adev->path_list, node) {
75 		if (path->template->owner == template) {
76 			spin_unlock(&adev->path_list_lock);
77 			return path;
78 		}
79 	}
80 
81 	spin_unlock(&adev->path_list_lock);
82 	return NULL;
83 }
84 
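/*
 * Check whether PCM hw_params match a topology audio format: rate, channel
 * count, container width (bit_depth) and valid sample bits.
 */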
85 static bool avs_test_hw_params(struct snd_pcm_hw_params *params,
86 			       struct avs_audio_format *fmt)
87 {
88 	return (params_rate(params) == fmt->sampling_freq &&
89 		params_channels(params) == fmt->num_channels &&
90 		params_physical_width(params) == fmt->bit_depth &&
91 		snd_pcm_hw_params_bits(params) == fmt->valid_bit_depth);
92 }
93 
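/*
 * Select the path variant whose front-end and back-end formats both match
 * the hw_params provided for the respective ends of the path.
 */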
94 static struct avs_tplg_path *
95 avs_path_find_variant(struct avs_dev *adev,
96 		      struct avs_tplg_path_template *template,
97 		      struct snd_pcm_hw_params *fe_params,
98 		      struct snd_pcm_hw_params *be_params)
99 {
100 	struct avs_tplg_path *variant;
101 
102 	list_for_each_entry(variant, &template->path_list, node) {
		/* Both formats are dereferenced below; skip variants lacking either. */
		if (!variant->fe_fmt || !variant->be_fmt)
			continue;

103 		dev_dbg(adev->dev, "check FE rate %d chn %d vbd %d bd %d\n",
104 			variant->fe_fmt->sampling_freq, variant->fe_fmt->num_channels,
105 			variant->fe_fmt->valid_bit_depth, variant->fe_fmt->bit_depth);
106 		dev_dbg(adev->dev, "check BE rate %d chn %d vbd %d bd %d\n",
107 			variant->be_fmt->sampling_freq, variant->be_fmt->num_channels,
108 			variant->be_fmt->valid_bit_depth, variant->be_fmt->bit_depth);
109 
110 		if (avs_test_hw_params(fe_params, variant->fe_fmt) &&
111 		    avs_test_hw_params(be_params, variant->be_fmt))
112 			return variant;
113 	}
114 
115 	return NULL;
116 }
117 
118 __maybe_unused
119 static bool avs_dma_type_is_host(u32 dma_type)
120 {
121 	return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
122 	       dma_type == AVS_DMA_HDA_HOST_INPUT;
123 }
124 
125 __maybe_unused
126 static bool avs_dma_type_is_link(u32 dma_type)
127 {
128 	return !avs_dma_type_is_host(dma_type);
129 }
130 
131 __maybe_unused
132 static bool avs_dma_type_is_output(u32 dma_type)
133 {
134 	return dma_type == AVS_DMA_HDA_HOST_OUTPUT ||
135 	       dma_type == AVS_DMA_HDA_LINK_OUTPUT ||
136 	       dma_type == AVS_DMA_I2S_LINK_OUTPUT;
137 }
138 
139 __maybe_unused
140 static bool avs_dma_type_is_input(u32 dma_type)
141 {
142 	return !avs_dma_type_is_output(dma_type);
143 }
144 
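/*
 * Build the copier module configuration in the mailbox-sized modcfg_buf.
 * The gateway node_id is derived from the copier's DMA type: I2S and DMIC
 * gateways take their configuration blob from the NHLT endpoint matching the
 * chosen format, while HDA gateways derive their vindex from the stream's
 * dma_id (for link DMA combined with the topology-assigned value).
 */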
145 static int avs_copier_create(struct avs_dev *adev, struct avs_path_module *mod)
146 {
147 	struct avs_tplg_module *t = mod->template;
148 	struct avs_copier_cfg *cfg;
149 	struct acpi_nhlt_format_config *ep_blob;
150 	struct acpi_nhlt_endpoint *ep;
151 	union avs_connector_node_id node_id = {0};
152 	size_t cfg_size, data_size;
153 	void *data = NULL;
154 	u32 dma_type;
155 	int ret;
156 
157 	data_size = sizeof(cfg->gtw_cfg.config);
158 	dma_type = t->cfg_ext->copier.dma_type;
159 	node_id.dma_type = dma_type;
160 
161 	switch (dma_type) {
162 		struct avs_audio_format *fmt;
163 		int direction;
164 
165 	case AVS_DMA_I2S_LINK_OUTPUT:
166 	case AVS_DMA_I2S_LINK_INPUT:
167 		if (avs_dma_type_is_input(dma_type))
168 			direction = SNDRV_PCM_STREAM_CAPTURE;
169 		else
170 			direction = SNDRV_PCM_STREAM_PLAYBACK;
171 
172 		if (t->cfg_ext->copier.blob_fmt)
173 			fmt = t->cfg_ext->copier.blob_fmt;
174 		else if (direction == SNDRV_PCM_STREAM_CAPTURE)
175 			fmt = t->in_fmt;
176 		else
177 			fmt = t->cfg_ext->copier.out_fmt;
178 
179 		ep = acpi_nhlt_find_endpoint(ACPI_NHLT_LINKTYPE_SSP,
180 					     ACPI_NHLT_DEVICETYPE_CODEC, direction,
181 					     t->cfg_ext->copier.vindex.i2s.instance);
182 		ep_blob = acpi_nhlt_endpoint_find_fmtcfg(ep, fmt->num_channels, fmt->sampling_freq,
183 							 fmt->valid_bit_depth, fmt->bit_depth);
184 		if (!ep_blob) {
185 			dev_err(adev->dev, "no I2S ep_blob found\n");
186 			return -ENOENT;
187 		}
188 
189 		data = ep_blob->config.capabilities;
190 		data_size = ep_blob->config.capabilities_size;
191 		/* I2S gateway's vindex is statically assigned in topology */
192 		node_id.vindex = t->cfg_ext->copier.vindex.val;
193 
194 		break;
195 
196 	case AVS_DMA_DMIC_LINK_INPUT:
197 		direction = SNDRV_PCM_STREAM_CAPTURE;
198 
199 		if (t->cfg_ext->copier.blob_fmt)
200 			fmt = t->cfg_ext->copier.blob_fmt;
201 		else
202 			fmt = t->in_fmt;
203 
204 		ep = acpi_nhlt_find_endpoint(ACPI_NHLT_LINKTYPE_PDM, -1, direction, 0);
205 		ep_blob = acpi_nhlt_endpoint_find_fmtcfg(ep, fmt->num_channels, fmt->sampling_freq,
206 							 fmt->valid_bit_depth, fmt->bit_depth);
207 		if (!ep_blob) {
208 			dev_err(adev->dev, "no DMIC ep_blob found\n");
209 			return -ENOENT;
210 		}
211 
212 		data = ep_blob->config.capabilities;
213 		data_size = ep_blob->config.capabilities_size;
214 		/* DMIC gateway's vindex is statically assigned in topology */
215 		node_id.vindex = t->cfg_ext->copier.vindex.val;
216 
217 		break;
218 
219 	case AVS_DMA_HDA_HOST_OUTPUT:
220 	case AVS_DMA_HDA_HOST_INPUT:
221 		/* HOST gateway's vindex is dynamically assigned with DMA id */
222 		node_id.vindex = mod->owner->owner->dma_id;
223 		break;
224 
225 	case AVS_DMA_HDA_LINK_OUTPUT:
226 	case AVS_DMA_HDA_LINK_INPUT:
227 		node_id.vindex = t->cfg_ext->copier.vindex.val |
228 				 mod->owner->owner->dma_id;
229 		break;
230 
231 	case INVALID_OBJECT_ID:
232 	default:
233 		node_id = INVALID_NODE_ID;
234 		break;
235 	}
236 
237 	cfg_size = offsetof(struct avs_copier_cfg, gtw_cfg.config) + data_size;
238 	if (cfg_size > AVS_MAILBOX_SIZE)
239 		return -EINVAL;
240 
241 	cfg = adev->modcfg_buf;
242 	memset(cfg, 0, cfg_size);
243 	cfg->base.cpc = t->cfg_base->cpc;
244 	cfg->base.ibs = t->cfg_base->ibs;
245 	cfg->base.obs = t->cfg_base->obs;
246 	cfg->base.is_pages = t->cfg_base->is_pages;
247 	cfg->base.audio_fmt = *t->in_fmt;
248 	cfg->out_fmt = *t->cfg_ext->copier.out_fmt;
249 	cfg->feature_mask = t->cfg_ext->copier.feature_mask;
250 	cfg->gtw_cfg.node_id = node_id;
251 	cfg->gtw_cfg.dma_buffer_size = t->cfg_ext->copier.dma_buffer_size;
252 	/* config_length in DWORDs */
253 	cfg->gtw_cfg.config_length = DIV_ROUND_UP(data_size, 4);
254 	if (data)
255 		memcpy(&cfg->gtw_cfg.config.blob, data, data_size);
256 
257 	mod->gtw_attrs = cfg->gtw_cfg.config.attrs;
258 
259 	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
260 				  t->core_id, t->domain, cfg, cfg_size,
261 				  &mod->instance_id);
262 	return ret;
263 }
264 
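/*
 * Look up the avs_control_data tied to this module by scanning the kcontrols
 * of the DAPM widget representing the owning path template and matching
 * their ids against the module's ctl_id.
 */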
265 static struct avs_control_data *avs_get_module_control(struct avs_path_module *mod)
266 {
267 	struct avs_tplg_module *t = mod->template;
268 	struct avs_tplg_path_template *path_tmpl;
269 	struct snd_soc_dapm_widget *w;
270 	int i;
271 
272 	path_tmpl = t->owner->owner->owner;
273 	w = path_tmpl->w;
274 
275 	for (i = 0; i < w->num_kcontrols; i++) {
276 		struct avs_control_data *ctl_data;
277 		struct soc_mixer_control *mc;
278 
279 		mc = (struct soc_mixer_control *)w->kcontrols[i]->private_value;
280 		ctl_data = (struct avs_control_data *)mc->dobj.private;
281 		if (ctl_data->id == t->ctl_id)
282 			return ctl_data;
283 	}
284 
285 	return NULL;
286 }
287 
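/*
 * A single volume block targeting AVS_ALL_CHANNELS_MASK is sent. The initial
 * volume comes from the module's kcontrol if one is attached, S32_MAX
 * (maximum) otherwise.
 */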
288 static int avs_peakvol_create(struct avs_dev *adev, struct avs_path_module *mod)
289 {
290 	struct avs_tplg_module *t = mod->template;
291 	struct avs_control_data *ctl_data;
292 	struct avs_peakvol_cfg *cfg;
293 	int volume = S32_MAX;
294 	size_t cfg_size;
295 	int ret;
296 
297 	ctl_data = avs_get_module_control(mod);
298 	if (ctl_data)
299 		volume = ctl_data->volume;
300 
301 	/* As controls with 2+ channels are unsupported, use a single block for all channels. */
302 	cfg_size = struct_size(cfg, vols, 1);
303 	if (cfg_size > AVS_MAILBOX_SIZE)
304 		return -EINVAL;
305 
306 	cfg = adev->modcfg_buf;
307 	memset(cfg, 0, cfg_size);
308 	cfg->base.cpc = t->cfg_base->cpc;
309 	cfg->base.ibs = t->cfg_base->ibs;
310 	cfg->base.obs = t->cfg_base->obs;
311 	cfg->base.is_pages = t->cfg_base->is_pages;
312 	cfg->base.audio_fmt = *t->in_fmt;
313 	cfg->vols[0].target_volume = volume;
314 	cfg->vols[0].channel_id = AVS_ALL_CHANNELS_MASK;
315 	cfg->vols[0].curve_type = AVS_AUDIO_CURVE_NONE;
316 	cfg->vols[0].curve_duration = 0;
317 
318 	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id, t->core_id,
319 				  t->domain, cfg, cfg_size, &mod->instance_id);
320 
321 	return ret;
322 }
323 
324 static int avs_updown_mix_create(struct avs_dev *adev, struct avs_path_module *mod)
325 {
326 	struct avs_tplg_module *t = mod->template;
327 	struct avs_updown_mixer_cfg cfg;
328 	int i;
329 
330 	cfg.base.cpc = t->cfg_base->cpc;
331 	cfg.base.ibs = t->cfg_base->ibs;
332 	cfg.base.obs = t->cfg_base->obs;
333 	cfg.base.is_pages = t->cfg_base->is_pages;
334 	cfg.base.audio_fmt = *t->in_fmt;
335 	cfg.out_channel_config = t->cfg_ext->updown_mix.out_channel_config;
336 	cfg.coefficients_select = t->cfg_ext->updown_mix.coefficients_select;
337 	for (i = 0; i < AVS_CHANNELS_MAX; i++)
338 		cfg.coefficients[i] = t->cfg_ext->updown_mix.coefficients[i];
339 	cfg.channel_map = t->cfg_ext->updown_mix.channel_map;
340 
341 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
342 				   t->core_id, t->domain, &cfg, sizeof(cfg),
343 				   &mod->instance_id);
344 }
345 
346 static int avs_src_create(struct avs_dev *adev, struct avs_path_module *mod)
347 {
348 	struct avs_tplg_module *t = mod->template;
349 	struct avs_src_cfg cfg;
350 
351 	cfg.base.cpc = t->cfg_base->cpc;
352 	cfg.base.ibs = t->cfg_base->ibs;
353 	cfg.base.obs = t->cfg_base->obs;
354 	cfg.base.is_pages = t->cfg_base->is_pages;
355 	cfg.base.audio_fmt = *t->in_fmt;
356 	cfg.out_freq = t->cfg_ext->src.out_freq;
357 
358 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
359 				   t->core_id, t->domain, &cfg, sizeof(cfg),
360 				   &mod->instance_id);
361 }
362 
363 static int avs_asrc_create(struct avs_dev *adev, struct avs_path_module *mod)
364 {
365 	struct avs_tplg_module *t = mod->template;
366 	struct avs_asrc_cfg cfg;
367 
368 	memset(&cfg, 0, sizeof(cfg));
369 	cfg.base.cpc = t->cfg_base->cpc;
370 	cfg.base.ibs = t->cfg_base->ibs;
371 	cfg.base.obs = t->cfg_base->obs;
372 	cfg.base.is_pages = t->cfg_base->is_pages;
373 	cfg.base.audio_fmt = *t->in_fmt;
374 	cfg.out_freq = t->cfg_ext->asrc.out_freq;
375 	cfg.mode = t->cfg_ext->asrc.mode;
376 	cfg.disable_jitter_buffer = t->cfg_ext->asrc.disable_jitter_buffer;
377 
378 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
379 				   t->core_id, t->domain, &cfg, sizeof(cfg),
380 				   &mod->instance_id);
381 }
382 
383 static int avs_aec_create(struct avs_dev *adev, struct avs_path_module *mod)
384 {
385 	struct avs_tplg_module *t = mod->template;
386 	struct avs_aec_cfg cfg;
387 
388 	cfg.base.cpc = t->cfg_base->cpc;
389 	cfg.base.ibs = t->cfg_base->ibs;
390 	cfg.base.obs = t->cfg_base->obs;
391 	cfg.base.is_pages = t->cfg_base->is_pages;
392 	cfg.base.audio_fmt = *t->in_fmt;
393 	cfg.ref_fmt = *t->cfg_ext->aec.ref_fmt;
394 	cfg.out_fmt = *t->cfg_ext->aec.out_fmt;
395 	cfg.cpc_lp_mode = t->cfg_ext->aec.cpc_lp_mode;
396 
397 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
398 				   t->core_id, t->domain, &cfg, sizeof(cfg),
399 				   &mod->instance_id);
400 }
401 
402 static int avs_mux_create(struct avs_dev *adev, struct avs_path_module *mod)
403 {
404 	struct avs_tplg_module *t = mod->template;
405 	struct avs_mux_cfg cfg;
406 
407 	cfg.base.cpc = t->cfg_base->cpc;
408 	cfg.base.ibs = t->cfg_base->ibs;
409 	cfg.base.obs = t->cfg_base->obs;
410 	cfg.base.is_pages = t->cfg_base->is_pages;
411 	cfg.base.audio_fmt = *t->in_fmt;
412 	cfg.ref_fmt = *t->cfg_ext->mux.ref_fmt;
413 	cfg.out_fmt = *t->cfg_ext->mux.out_fmt;
414 
415 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
416 				   t->core_id, t->domain, &cfg, sizeof(cfg),
417 				   &mod->instance_id);
418 }
419 
420 static int avs_wov_create(struct avs_dev *adev, struct avs_path_module *mod)
421 {
422 	struct avs_tplg_module *t = mod->template;
423 	struct avs_wov_cfg cfg;
424 
425 	cfg.base.cpc = t->cfg_base->cpc;
426 	cfg.base.ibs = t->cfg_base->ibs;
427 	cfg.base.obs = t->cfg_base->obs;
428 	cfg.base.is_pages = t->cfg_base->is_pages;
429 	cfg.base.audio_fmt = *t->in_fmt;
430 	cfg.cpc_lp_mode = t->cfg_ext->wov.cpc_lp_mode;
431 
432 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
433 				   t->core_id, t->domain, &cfg, sizeof(cfg),
434 				   &mod->instance_id);
435 }
436 
437 static int avs_micsel_create(struct avs_dev *adev, struct avs_path_module *mod)
438 {
439 	struct avs_tplg_module *t = mod->template;
440 	struct avs_micsel_cfg cfg;
441 
442 	cfg.base.cpc = t->cfg_base->cpc;
443 	cfg.base.ibs = t->cfg_base->ibs;
444 	cfg.base.obs = t->cfg_base->obs;
445 	cfg.base.is_pages = t->cfg_base->is_pages;
446 	cfg.base.audio_fmt = *t->in_fmt;
447 	cfg.out_fmt = *t->cfg_ext->micsel.out_fmt;
448 
449 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
450 				   t->core_id, t->domain, &cfg, sizeof(cfg),
451 				   &mod->instance_id);
452 }
453 
454 static int avs_modbase_create(struct avs_dev *adev, struct avs_path_module *mod)
455 {
456 	struct avs_tplg_module *t = mod->template;
457 	struct avs_modcfg_base cfg;
458 
459 	cfg.cpc = t->cfg_base->cpc;
460 	cfg.ibs = t->cfg_base->ibs;
461 	cfg.obs = t->cfg_base->obs;
462 	cfg.is_pages = t->cfg_base->is_pages;
463 	cfg.audio_fmt = *t->in_fmt;
464 
465 	return avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
466 				   t->core_id, t->domain, &cfg, sizeof(cfg),
467 				   &mod->instance_id);
468 }
469 
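/*
 * Fallback constructor for module types without a dedicated handler: build an
 * extended config consisting of the base config plus the per-pin formats
 * described by the topology, then create the module instance.
 */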
470 static int avs_modext_create(struct avs_dev *adev, struct avs_path_module *mod)
471 {
472 	struct avs_tplg_module *t = mod->template;
473 	struct avs_tplg_modcfg_ext *tcfg = t->cfg_ext;
474 	struct avs_modcfg_ext *cfg;
475 	size_t cfg_size, num_pins;
476 	int ret, i;
477 
478 	num_pins = tcfg->generic.num_input_pins + tcfg->generic.num_output_pins;
479 	cfg_size = struct_size(cfg, pin_fmts, num_pins);
480 
481 	if (cfg_size > AVS_MAILBOX_SIZE)
482 		return -EINVAL;
483 
484 	cfg = adev->modcfg_buf;
485 	memset(cfg, 0, cfg_size);
486 	cfg->base.cpc = t->cfg_base->cpc;
487 	cfg->base.ibs = t->cfg_base->ibs;
488 	cfg->base.obs = t->cfg_base->obs;
489 	cfg->base.is_pages = t->cfg_base->is_pages;
490 	cfg->base.audio_fmt = *t->in_fmt;
491 	cfg->num_input_pins = tcfg->generic.num_input_pins;
492 	cfg->num_output_pins = tcfg->generic.num_output_pins;
493 
494 	/* configure pin formats */
495 	for (i = 0; i < num_pins; i++) {
496 		struct avs_tplg_pin_format *tpin = &tcfg->generic.pin_fmts[i];
497 		struct avs_pin_format *pin = &cfg->pin_fmts[i];
498 
499 		pin->pin_index = tpin->pin_index;
500 		pin->iobs = tpin->iobs;
501 		pin->audio_fmt = *tpin->fmt;
502 	}
503 
504 	ret = avs_dsp_init_module(adev, mod->module_id, mod->owner->instance_id,
505 				  t->core_id, t->domain, cfg, cfg_size,
506 				  &mod->instance_id);
507 	return ret;
508 }
509 
510 static int avs_probe_create(struct avs_dev *adev, struct avs_path_module *mod)
511 {
512 	dev_err(adev->dev, "Probe module can't be instantiated by topology\n");
513 	return -EINVAL;
514 }
515 
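/*
 * Association of a module-type UUID with its configuration constructor.
 * Types absent from avs_module_create[] fall back to avs_modext_create(),
 * see avs_path_module_type_create().
 */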
516 struct avs_module_create {
517 	guid_t *guid;
518 	int (*create)(struct avs_dev *adev, struct avs_path_module *mod);
519 };
520 
521 static struct avs_module_create avs_module_create[] = {
522 	{ &AVS_MIXIN_MOD_UUID, avs_modbase_create },
523 	{ &AVS_MIXOUT_MOD_UUID, avs_modbase_create },
524 	{ &AVS_KPBUFF_MOD_UUID, avs_modbase_create },
525 	{ &AVS_COPIER_MOD_UUID, avs_copier_create },
526 	{ &AVS_PEAKVOL_MOD_UUID, avs_peakvol_create },
527 	{ &AVS_GAIN_MOD_UUID, avs_peakvol_create },
528 	{ &AVS_MICSEL_MOD_UUID, avs_micsel_create },
529 	{ &AVS_MUX_MOD_UUID, avs_mux_create },
530 	{ &AVS_UPDWMIX_MOD_UUID, avs_updown_mix_create },
531 	{ &AVS_SRCINTC_MOD_UUID, avs_src_create },
532 	{ &AVS_AEC_MOD_UUID, avs_aec_create },
533 	{ &AVS_ASRC_MOD_UUID, avs_asrc_create },
534 	{ &AVS_INTELWOV_MOD_UUID, avs_wov_create },
535 	{ &AVS_PROBE_MOD_UUID, avs_probe_create },
536 };
537 
538 static int avs_path_module_type_create(struct avs_dev *adev, struct avs_path_module *mod)
539 {
540 	const guid_t *type = &mod->template->cfg_ext->type;
541 
542 	for (int i = 0; i < ARRAY_SIZE(avs_module_create); i++)
543 		if (guid_equal(type, avs_module_create[i].guid))
544 			return avs_module_create[i].create(adev, mod);
545 
546 	return avs_modext_create(adev, mod);
547 }
548 
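/*
 * Push any topology-provided initial parameters to a freshly created module
 * instance. config_ids index the component topology's init_configs[] and
 * each entry is sent with a separate SET_LARGE_CONFIG request.
 */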
549 static int avs_path_module_send_init_configs(struct avs_dev *adev, struct avs_path_module *mod)
550 {
551 	struct avs_soc_component *acomp;
552 
553 	acomp = to_avs_soc_component(mod->template->owner->owner->owner->owner->comp);
554 
555 	u32 num_ids = mod->template->num_config_ids;
556 	u32 *ids = mod->template->config_ids;
557 
558 	for (int i = 0; i < num_ids; i++) {
559 		struct avs_tplg_init_config *config = &acomp->tplg->init_configs[ids[i]];
560 		size_t len = config->length;
561 		void *data = config->data;
562 		u32 param = config->param;
563 		int ret;
564 
565 		ret = avs_ipc_set_large_config(adev, mod->module_id, mod->instance_id,
566 					       param, data, len);
567 		if (ret) {
568 			dev_err(adev->dev, "send initial module config failed: %d\n", ret);
569 			return AVS_IPC_RET(ret);
570 		}
571 	}
572 
573 	return 0;
574 }
575 
576 static void avs_path_module_free(struct avs_dev *adev, struct avs_path_module *mod)
577 {
578 	kfree(mod);
579 }
580 
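/*
 * Instantiate a single module: translate its type UUID into a runtime module
 * id, create the DSP instance with a type-specific configuration and send its
 * initial configs. Returns ERR_PTR() on failure.
 */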
581 static struct avs_path_module *
582 avs_path_module_create(struct avs_dev *adev,
583 		       struct avs_path_pipeline *owner,
584 		       struct avs_tplg_module *template)
585 {
586 	struct avs_path_module *mod;
587 	int module_id, ret;
588 
589 	module_id = avs_get_module_id(adev, &template->cfg_ext->type);
590 	if (module_id < 0)
591 		return ERR_PTR(module_id);
592 
593 	mod = kzalloc(sizeof(*mod), GFP_KERNEL);
594 	if (!mod)
595 		return ERR_PTR(-ENOMEM);
596 
597 	mod->template = template;
598 	mod->module_id = module_id;
599 	mod->owner = owner;
600 	INIT_LIST_HEAD(&mod->node);
601 
602 	ret = avs_path_module_type_create(adev, mod);
603 	if (ret) {
604 		dev_err(adev->dev, "module-type create failed: %d\n", ret);
605 		kfree(mod);
606 		return ERR_PTR(ret);
607 	}
608 
609 	ret = avs_path_module_send_init_configs(adev, mod);
610 	if (ret) {
611 		kfree(mod);
612 		return ERR_PTR(ret);
613 	}
614 
615 	return mod;
616 }
617 
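/*
 * Resolve a binding template into concrete module instances. The remote end
 * may belong to a different, already-instantiated path, looked up by its
 * topology name and path-template id; t->is_sink decides on which side of
 * the connection this path's module sits.
 */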
618 static int avs_path_binding_arm(struct avs_dev *adev, struct avs_path_binding *binding)
619 {
620 	struct avs_path_module *this_mod, *target_mod;
621 	struct avs_path_pipeline *target_ppl;
622 	struct avs_path *target_path;
623 	struct avs_tplg_binding *t;
624 
625 	t = binding->template;
626 	this_mod = avs_path_find_module(binding->owner,
627 					t->mod_id);
628 	if (!this_mod) {
629 		dev_err(adev->dev, "path mod %d not found\n", t->mod_id);
630 		return -EINVAL;
631 	}
632 
633 	/* update with target_tplg_name too */
634 	target_path = avs_path_find_path(adev, t->target_tplg_name,
635 					 t->target_path_tmpl_id);
636 	if (!target_path) {
637 		dev_err(adev->dev, "target path %s:%d not found\n",
638 			t->target_tplg_name, t->target_path_tmpl_id);
639 		return -EINVAL;
640 	}
641 
642 	target_ppl = avs_path_find_pipeline(target_path,
643 					    t->target_ppl_id);
644 	if (!target_ppl) {
645 		dev_err(adev->dev, "target ppl %d not found\n", t->target_ppl_id);
646 		return -EINVAL;
647 	}
648 
649 	target_mod = avs_path_find_module(target_ppl, t->target_mod_id);
650 	if (!target_mod) {
651 		dev_err(adev->dev, "target mod %d not found\n", t->target_mod_id);
652 		return -EINVAL;
653 	}
654 
655 	if (t->is_sink) {
656 		binding->sink = this_mod;
657 		binding->sink_pin = t->mod_pin;
658 		binding->source = target_mod;
659 		binding->source_pin = t->target_mod_pin;
660 	} else {
661 		binding->sink = target_mod;
662 		binding->sink_pin = t->target_mod_pin;
663 		binding->source = this_mod;
664 		binding->source_pin = t->mod_pin;
665 	}
666 
667 	return 0;
668 }
669 
670 static void avs_path_binding_free(struct avs_dev *adev, struct avs_path_binding *binding)
671 {
672 	kfree(binding);
673 }
674 
675 static struct avs_path_binding *avs_path_binding_create(struct avs_dev *adev,
676 							struct avs_path_pipeline *owner,
677 							struct avs_tplg_binding *t)
678 {
679 	struct avs_path_binding *binding;
680 
681 	binding = kzalloc(sizeof(*binding), GFP_KERNEL);
682 	if (!binding)
683 		return ERR_PTR(-ENOMEM);
684 
685 	binding->template = t;
686 	binding->owner = owner;
687 	INIT_LIST_HEAD(&binding->node);
688 
689 	return binding;
690 }
691 
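/*
 * Bind neighbouring modules of a pipeline in list order. Connections that
 * cross pipeline boundaries are described by bindings and handled separately
 * in avs_path_bind().
 */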
692 static int avs_path_pipeline_arm(struct avs_dev *adev,
693 				 struct avs_path_pipeline *ppl)
694 {
695 	struct avs_path_module *mod;
696 
697 	list_for_each_entry(mod, &ppl->mod_list, node) {
698 		struct avs_path_module *source, *sink;
699 		int ret;
700 
701 		/*
702 		 * Whether this is the only module or simply the last one on the
703 		 * list, there is no next module to bind it to.
704 		 */
705 		if (mod == list_last_entry(&ppl->mod_list,
706 					   struct avs_path_module, node))
707 			break;
708 
709 		/* bind current module to next module on list */
710 		source = mod;
711 		sink = list_next_entry(mod, node);
712 
713 		ret = avs_ipc_bind(adev, source->module_id, source->instance_id,
714 				   sink->module_id, sink->instance_id, 0, 0);
715 		if (ret)
716 			return AVS_IPC_RET(ret);
717 	}
718 
719 	return 0;
720 }
721 
722 static void avs_path_pipeline_free(struct avs_dev *adev,
723 				   struct avs_path_pipeline *ppl)
724 {
725 	struct avs_path_binding *binding, *bsave;
726 	struct avs_path_module *mod, *save;
727 
728 	list_for_each_entry_safe(binding, bsave, &ppl->binding_list, node) {
729 		list_del(&binding->node);
730 		avs_path_binding_free(adev, binding);
731 	}
732 
733 	avs_dsp_delete_pipeline(adev, ppl->instance_id);
734 
735 	/* Unload resources occupied by owned modules */
736 	list_for_each_entry_safe(mod, save, &ppl->mod_list, node) {
737 		avs_dsp_delete_module(adev, mod->module_id, mod->instance_id,
738 				      mod->owner->instance_id,
739 				      mod->template->core_id);
740 		avs_path_module_free(adev, mod);
741 	}
742 
743 	list_del(&ppl->node);
744 	kfree(ppl);
745 }
746 
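/*
 * Create the DSP pipeline first, then instantiate its modules and binding
 * descriptors. On any failure, avs_path_pipeline_free() tears down whatever
 * has been created so far.
 */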
747 static struct avs_path_pipeline *
748 avs_path_pipeline_create(struct avs_dev *adev, struct avs_path *owner,
749 			 struct avs_tplg_pipeline *template)
750 {
751 	struct avs_path_pipeline *ppl;
752 	struct avs_tplg_pplcfg *cfg = template->cfg;
753 	struct avs_tplg_module *tmod;
754 	int ret, i;
755 
756 	ppl = kzalloc(sizeof(*ppl), GFP_KERNEL);
757 	if (!ppl)
758 		return ERR_PTR(-ENOMEM);
759 
760 	ppl->template = template;
761 	ppl->owner = owner;
762 	INIT_LIST_HEAD(&ppl->binding_list);
763 	INIT_LIST_HEAD(&ppl->mod_list);
764 	INIT_LIST_HEAD(&ppl->node);
765 
766 	ret = avs_dsp_create_pipeline(adev, cfg->req_size, cfg->priority,
767 				      cfg->lp, cfg->attributes,
768 				      &ppl->instance_id);
769 	if (ret) {
770 		dev_err(adev->dev, "error creating pipeline: %d\n", ret);
771 		kfree(ppl);
772 		return ERR_PTR(ret);
773 	}
774 
775 	list_for_each_entry(tmod, &template->mod_list, node) {
776 		struct avs_path_module *mod;
777 
778 		mod = avs_path_module_create(adev, ppl, tmod);
779 		if (IS_ERR(mod)) {
780 			ret = PTR_ERR(mod);
781 			dev_err(adev->dev, "error creating module: %d\n", ret);
782 			goto init_err;
783 		}
784 
785 		list_add_tail(&mod->node, &ppl->mod_list);
786 	}
787 
788 	for (i = 0; i < template->num_bindings; i++) {
789 		struct avs_path_binding *binding;
790 
791 		binding = avs_path_binding_create(adev, ppl, template->bindings[i]);
792 		if (IS_ERR(binding)) {
793 			ret = PTR_ERR(binding);
794 			dev_err(adev->dev, "error creating binding: %d\n", ret);
795 			goto init_err;
796 		}
797 
798 		list_add_tail(&binding->node, &ppl->binding_list);
799 	}
800 
801 	return ppl;
802 
803 init_err:
804 	avs_path_pipeline_free(adev, ppl);
805 	return ERR_PTR(ret);
806 }
807 
808 static int avs_path_init(struct avs_dev *adev, struct avs_path *path,
809 			 struct avs_tplg_path *template, u32 dma_id)
810 {
811 	struct avs_tplg_pipeline *tppl;
812 
813 	path->owner = adev;
814 	path->template = template;
815 	path->dma_id = dma_id;
816 	INIT_LIST_HEAD(&path->ppl_list);
817 	INIT_LIST_HEAD(&path->node);
818 
819 	/* create all the pipelines */
820 	list_for_each_entry(tppl, &template->ppl_list, node) {
821 		struct avs_path_pipeline *ppl;
822 
823 		ppl = avs_path_pipeline_create(adev, path, tppl);
824 		if (IS_ERR(ppl))
825 			return PTR_ERR(ppl);
826 
827 		list_add_tail(&ppl->node, &path->ppl_list);
828 	}
829 
830 	spin_lock(&adev->path_list_lock);
831 	list_add_tail(&path->node, &adev->path_list);
832 	spin_unlock(&adev->path_list_lock);
833 
834 	return 0;
835 }
836 
837 static int avs_path_arm(struct avs_dev *adev, struct avs_path *path)
838 {
839 	struct avs_path_pipeline *ppl;
840 	struct avs_path_binding *binding;
841 	int ret;
842 
843 	list_for_each_entry(ppl, &path->ppl_list, node) {
844 		/*
845 		 * Arm all pipeline bindings before binding internal modules,
846 		 * as the former costs no IPCs, which isn't true for the latter.
847 		 */
848 		list_for_each_entry(binding, &ppl->binding_list, node) {
849 			ret = avs_path_binding_arm(adev, binding);
850 			if (ret < 0)
851 				return ret;
852 		}
853 
854 		ret = avs_path_pipeline_arm(adev, ppl);
855 		if (ret < 0)
856 			return ret;
857 	}
858 
859 	return 0;
860 }
861 
862 static void avs_path_free_unlocked(struct avs_path *path)
863 {
864 	struct avs_path_pipeline *ppl, *save;
865 
866 	spin_lock(&path->owner->path_list_lock);
867 	list_del(&path->node);
868 	spin_unlock(&path->owner->path_list_lock);
869 
870 	list_for_each_entry_safe(ppl, save, &path->ppl_list, node)
871 		avs_path_pipeline_free(path->owner, ppl);
872 
873 	kfree(path);
874 }
875 
876 static struct avs_path *avs_path_create_unlocked(struct avs_dev *adev, u32 dma_id,
877 						 struct avs_tplg_path *template)
878 {
879 	struct avs_path *path;
880 	int ret;
881 
882 	path = kzalloc(sizeof(*path), GFP_KERNEL);
883 	if (!path)
884 		return ERR_PTR(-ENOMEM);
885 
886 	ret = avs_path_init(adev, path, template, dma_id);
887 	if (ret < 0)
888 		goto err;
889 
890 	ret = avs_path_arm(adev, path);
891 	if (ret < 0)
892 		goto err;
893 
894 	path->state = AVS_PPL_STATE_INVALID;
895 	return path;
896 err:
897 	avs_path_free_unlocked(path);
898 	return ERR_PTR(ret);
899 }
900 
901 void avs_path_free(struct avs_path *path)
902 {
903 	struct avs_dev *adev = path->owner;
904 
905 	mutex_lock(&adev->path_mutex);
906 	avs_path_free_unlocked(path);
907 	mutex_unlock(&adev->path_mutex);
908 }
909 
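/*
 * Create a path for @dma_id from the template variant that matches both the
 * FE and BE hw_params. The returned path is armed but left in
 * AVS_PPL_STATE_INVALID state; callers drive it with avs_path_bind() and the
 * state helpers that follow. A rough usage sketch (hypothetical caller,
 * error handling omitted):
 *
 *	path = avs_path_create(adev, dma_id, template, fe_params, be_params);
 *	avs_path_bind(path);
 *	avs_path_reset(path);
 *	avs_path_pause(path);
 *	avs_path_run(path, AVS_TPLG_TRIGGER_AUTO);
 */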
910 struct avs_path *avs_path_create(struct avs_dev *adev, u32 dma_id,
911 				 struct avs_tplg_path_template *template,
912 				 struct snd_pcm_hw_params *fe_params,
913 				 struct snd_pcm_hw_params *be_params)
914 {
915 	struct avs_tplg_path *variant;
916 	struct avs_path *path;
917 
918 	variant = avs_path_find_variant(adev, template, fe_params, be_params);
919 	if (!variant) {
920 		dev_err(adev->dev, "no matching variant found\n");
921 		return ERR_PTR(-ENOENT);
922 	}
923 
924 	/* Serialize path and its components creation. */
925 	mutex_lock(&adev->path_mutex);
926 	/* Satisfy needs of avs_path_find_tplg(). */
927 	mutex_lock(&adev->comp_list_mutex);
928 
929 	path = avs_path_create_unlocked(adev, dma_id, variant);
930 
931 	mutex_unlock(&adev->comp_list_mutex);
932 	mutex_unlock(&adev->path_mutex);
933 
934 	return path;
935 }
936 
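/*
 * Copiers that are bound through an output pin other than 0 must have the
 * sink format programmed with a set_sink_format request before the actual
 * BIND is sent.
 */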
937 static int avs_path_bind_prepare(struct avs_dev *adev,
938 				 struct avs_path_binding *binding)
939 {
940 	const struct avs_audio_format *src_fmt, *sink_fmt;
941 	struct avs_tplg_module *tsource = binding->source->template;
942 	struct avs_path_module *source = binding->source;
943 	int ret;
944 
945 	/*
946 	 * Only copier modules that are about to be bound
947 	 * to an output pin other than 0 need preparation.
948 	 */
949 	if (!binding->source_pin)
950 		return 0;
951 	if (!guid_equal(&tsource->cfg_ext->type, &AVS_COPIER_MOD_UUID))
952 		return 0;
953 
954 	src_fmt = tsource->in_fmt;
955 	sink_fmt = binding->sink->template->in_fmt;
956 
957 	ret = avs_ipc_copier_set_sink_format(adev, source->module_id,
958 					     source->instance_id, binding->source_pin,
959 					     src_fmt, sink_fmt);
960 	if (ret) {
961 		dev_err(adev->dev, "config copier failed: %d\n", ret);
962 		return AVS_IPC_RET(ret);
963 	}
964 
965 	return 0;
966 }
967 
968 int avs_path_bind(struct avs_path *path)
969 {
970 	struct avs_path_pipeline *ppl;
971 	struct avs_dev *adev = path->owner;
972 	int ret;
973 
974 	list_for_each_entry(ppl, &path->ppl_list, node) {
975 		struct avs_path_binding *binding;
976 
977 		list_for_each_entry(binding, &ppl->binding_list, node) {
978 			struct avs_path_module *source, *sink;
979 
980 			source = binding->source;
981 			sink = binding->sink;
982 
983 			ret = avs_path_bind_prepare(adev, binding);
984 			if (ret < 0)
985 				return ret;
986 
987 			ret = avs_ipc_bind(adev, source->module_id,
988 					   source->instance_id, sink->module_id,
989 					   sink->instance_id, binding->sink_pin,
990 					   binding->source_pin);
991 			if (ret) {
992 				dev_err(adev->dev, "bind path failed: %d\n", ret);
993 				return AVS_IPC_RET(ret);
994 			}
995 		}
996 	}
997 
998 	return 0;
999 }
1000 
1001 int avs_path_unbind(struct avs_path *path)
1002 {
1003 	struct avs_path_pipeline *ppl;
1004 	struct avs_dev *adev = path->owner;
1005 	int ret;
1006 
1007 	list_for_each_entry(ppl, &path->ppl_list, node) {
1008 		struct avs_path_binding *binding;
1009 
1010 		list_for_each_entry(binding, &ppl->binding_list, node) {
1011 			struct avs_path_module *source, *sink;
1012 
1013 			source = binding->source;
1014 			sink = binding->sink;
1015 
1016 			ret = avs_ipc_unbind(adev, source->module_id,
1017 					     source->instance_id, sink->module_id,
1018 					     sink->instance_id, binding->sink_pin,
1019 					     binding->source_pin);
1020 			if (ret) {
1021 				dev_err(adev->dev, "unbind path failed: %d\n", ret);
1022 				return AVS_IPC_RET(ret);
1023 			}
1024 		}
1025 	}
1026 
1027 	return 0;
1028 }
1029 
1030 int avs_path_reset(struct avs_path *path)
1031 {
1032 	struct avs_path_pipeline *ppl;
1033 	struct avs_dev *adev = path->owner;
1034 	int ret;
1035 
1036 	if (path->state == AVS_PPL_STATE_RESET)
1037 		return 0;
1038 
1039 	list_for_each_entry(ppl, &path->ppl_list, node) {
1040 		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
1041 						 AVS_PPL_STATE_RESET);
1042 		if (ret) {
1043 			dev_err(adev->dev, "reset path failed: %d\n", ret);
1044 			path->state = AVS_PPL_STATE_INVALID;
1045 			return AVS_IPC_RET(ret);
1046 		}
1047 	}
1048 
1049 	path->state = AVS_PPL_STATE_RESET;
1050 	return 0;
1051 }
1052 
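/* Pipelines are paused in reverse order of their position on the path's list. */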
1053 int avs_path_pause(struct avs_path *path)
1054 {
1055 	struct avs_path_pipeline *ppl;
1056 	struct avs_dev *adev = path->owner;
1057 	int ret;
1058 
1059 	if (path->state == AVS_PPL_STATE_PAUSED)
1060 		return 0;
1061 
1062 	list_for_each_entry_reverse(ppl, &path->ppl_list, node) {
1063 		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
1064 						 AVS_PPL_STATE_PAUSED);
1065 		if (ret) {
1066 			dev_err(adev->dev, "pause path failed: %d\n", ret);
1067 			path->state = AVS_PPL_STATE_INVALID;
1068 			return AVS_IPC_RET(ret);
1069 		}
1070 	}
1071 
1072 	path->state = AVS_PPL_STATE_PAUSED;
1073 	return 0;
1074 }
1075 
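/*
 * Set to RUNNING only those pipelines whose topology trigger matches
 * @trigger. An already running path is a no-op solely for
 * AVS_TPLG_TRIGGER_AUTO.
 */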
1076 int avs_path_run(struct avs_path *path, int trigger)
1077 {
1078 	struct avs_path_pipeline *ppl;
1079 	struct avs_dev *adev = path->owner;
1080 	int ret;
1081 
1082 	if (path->state == AVS_PPL_STATE_RUNNING && trigger == AVS_TPLG_TRIGGER_AUTO)
1083 		return 0;
1084 
1085 	list_for_each_entry(ppl, &path->ppl_list, node) {
1086 		if (ppl->template->cfg->trigger != trigger)
1087 			continue;
1088 
1089 		ret = avs_ipc_set_pipeline_state(adev, ppl->instance_id,
1090 						 AVS_PPL_STATE_RUNNING);
1091 		if (ret) {
1092 			dev_err(adev->dev, "run path failed: %d\n", ret);
1093 			path->state = AVS_PPL_STATE_INVALID;
1094 			return AVS_IPC_RET(ret);
1095 		}
1096 	}
1097 
1098 	path->state = AVS_PPL_STATE_RUNNING;
1099 	return 0;
1100 }
1101