xref: /linux/sound/soc/sof/intel/hda-sdw-bpt.c (revision af0bc3ac9a9e830cb52b718ecb237c4e76a466be)
// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license.  When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2025 Intel Corporation.
//

/*
 * Hardware interface for SoundWire BPT support with HDA DMA
 */

#include <linux/lcm.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda-mlink.h>
#include <sound/hda-sdw-bpt.h>
#include <sound/sof.h>
#include <sound/sof/ipc4/header.h>
#include "../ops.h"
#include "../sof-priv.h"
#include "../ipc4-priv.h"
#include "hda.h"

#define BPT_FREQUENCY		192000 /* The max rate defined in rate_bits[] in hdac_device.c */
#define BPT_MULTIPLIER		((BPT_FREQUENCY / 48000) - 1)
#define BPT_CHAIN_DMA_FIFO_MS	10
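
/*
 * With BPT_FREQUENCY at 192 kHz, BPT_MULTIPLIER evaluates to 3: the HDA
 * stream format MULT field encodes the 48 kHz multiplier minus one, so this
 * programs a x4 rate on top of the 48 kHz base rate.
 */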
/*
 * This routine is directly inspired by sof_ipc4_chain_dma_trigger(),
 * with major simplifications since there are no pipelines defined
 * and no dependency on ALSA hw_params
 */
static int chain_dma_trigger(struct snd_sof_dev *sdev, unsigned int stream_tag,
			     int direction, int state)
{
	struct sof_ipc4_fw_data *ipc4_data = sdev->private;
	bool allocate, enable, set_fifo_size;
	struct sof_ipc4_msg msg = {{ 0 }};
	int dma_id;

	if (sdev->pdata->ipc_type != SOF_IPC_TYPE_4)
		return -EOPNOTSUPP;

	switch (state) {
	case SOF_IPC4_PIPE_RUNNING: /* Allocate and start the chain */
		allocate = true;
		enable = true;
		set_fifo_size = true;
		break;
	case SOF_IPC4_PIPE_PAUSED: /* Stop the chain */
		allocate = true;
		enable = false;
		set_fifo_size = false;
		break;
	case SOF_IPC4_PIPE_RESET: /* Deallocate chain resources and remove the chain */
		allocate = false;
		enable = false;
		set_fifo_size = false;
		break;
	default:
		dev_err(sdev->dev, "Unexpected state %d\n", state);
		return -EINVAL;
	}

	msg.primary = SOF_IPC4_MSG_TYPE_SET(SOF_IPC4_GLB_CHAIN_DMA);
	msg.primary |= SOF_IPC4_MSG_DIR(SOF_IPC4_MSG_REQUEST);
	msg.primary |= SOF_IPC4_MSG_TARGET(SOF_IPC4_FW_GEN_MSG);

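	/*
	 * The firmware's host DMA index space lists all playback channels
	 * first, followed by capture channels, hence the capture offset on
	 * the DMA ID below.
	 */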
	/* for BPT/BRA we can use the same stream tag for host and link */
	dma_id = stream_tag - 1;
	if (direction == SNDRV_PCM_STREAM_CAPTURE)
		dma_id += ipc4_data->num_playback_streams;

	msg.primary |=  SOF_IPC4_GLB_CHAIN_DMA_HOST_ID(dma_id);
	msg.primary |=  SOF_IPC4_GLB_CHAIN_DMA_LINK_ID(dma_id);

	/* For BPT/BRA we use 32 bits so SCS is not set */

	/* CHAIN DMA needs at least 2ms */
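	/* with a 10ms FIFO this requests 192 frames/ms * 10 * 4 bytes = 7680 bytes */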
	if (set_fifo_size)
		msg.extension |=  SOF_IPC4_GLB_EXT_CHAIN_DMA_FIFO_SIZE(BPT_FREQUENCY / 1000 *
								       BPT_CHAIN_DMA_FIFO_MS *
								       sizeof(u32));

	if (allocate)
		msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_ALLOCATE_MASK;

	if (enable)
		msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_ENABLE_MASK;

	return sof_ipc_tx_message_no_reply(sdev->ipc, &msg, 0);
}

static int hda_sdw_bpt_dma_prepare(struct device *dev, struct hdac_ext_stream **sdw_bpt_stream,
				   struct snd_dma_buffer *dmab_bdl, u32 bpt_num_bytes,
				   unsigned int num_channels, int direction)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_ext_stream *bpt_stream;
	unsigned int format = HDA_CL_STREAM_FORMAT;

	if (!sdev->dspless_mode_selected) {
		int ret;

		/*
		 * Make sure that the DSP is booted up, which might not be the
		 * case if the on-demand DSP boot is used
		 */
		ret = snd_sof_boot_dsp_firmware(sdev);
		if (ret)
			return ret;
	}
	/*
	 * the baseline format needs to be adjusted to
	 * bandwidth requirements
	 */
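	/*
	 * CHAN is set to the channel count minus one and MULT to the 48 kHz
	 * rate multiplier minus one, i.e. x4 for the 192 kHz BPT transfer.
	 */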
	format |= (num_channels - 1);
	format |= BPT_MULTIPLIER << AC_FMT_MULT_SHIFT;

	dev_dbg(dev, "direction %d format_val %#x\n", direction, format);

	bpt_stream = hda_data_stream_prepare(dev, format, bpt_num_bytes, dmab_bdl,
					     false, direction, false, true);
	if (IS_ERR(bpt_stream)) {
		dev_err(sdev->dev, "%s: SDW BPT DMA prepare failed: dir %d\n",
			__func__, direction);
		return PTR_ERR(bpt_stream);
	}
	*sdw_bpt_stream = bpt_stream;

	if (!sdev->dspless_mode_selected) {
		struct hdac_stream *hstream;
		u32 mask;

		/* decouple host and link DMA if the DSP is used */
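		/*
		 * In decoupled mode the firmware CHAIN_DMA module moves the
		 * data between the host and link DMAs; the link DMA is
		 * started separately in hda_sdw_bpt_dma_enable().
		 */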
		hstream = &bpt_stream->hstream;
		mask = BIT(hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, mask, mask);

		snd_hdac_ext_stream_reset(bpt_stream);

		snd_hdac_ext_stream_setup(bpt_stream, format);
	}

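	/* the link output stream ID (LOSIDV) only needs to be declared for playback */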
	if (hdac_stream(bpt_stream)->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		struct hdac_ext_link *hlink;
		int stream_tag;

		stream_tag = hdac_stream(bpt_stream)->stream_tag;
		hlink = hdac_bus_eml_sdw_get_hlink(bus);

		snd_hdac_ext_bus_link_set_stream_id(hlink, stream_tag);
	}
	return 0;
}

static int hda_sdw_bpt_dma_deprepare(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream,
				     struct snd_dma_buffer *dmab_bdl)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_stream *hstream;
	u32 mask;
	int ret;

	ret = hda_data_stream_cleanup(sdev->dev, dmab_bdl, false, sdw_bpt_stream, true);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: SDW BPT DMA cleanup failed\n",
			__func__);
		return ret;
	}

	if (hdac_stream(sdw_bpt_stream)->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		struct hdac_ext_link *hlink;
		int stream_tag;

		stream_tag = hdac_stream(sdw_bpt_stream)->stream_tag;
		hlink = hdac_bus_eml_sdw_get_hlink(bus);

		snd_hdac_ext_bus_link_clear_stream_id(hlink, stream_tag);
	}

	if (!sdev->dspless_mode_selected) {
		/* Release CHAIN_DMA resources */
		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_RESET);
		if (ret < 0)
			dev_err(sdev->dev, "%s: chain_dma_trigger PIPE_RESET failed: %d\n",
				__func__, ret);

		/* couple host and link DMA */
		hstream = &sdw_bpt_stream->hstream;
		mask = BIT(hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, mask, 0);
	}

	return 0;
}

static int hda_sdw_bpt_dma_enable(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_START);
	if (ret < 0)
		dev_err(sdev->dev, "%s: SDW BPT DMA trigger start failed\n", __func__);

	if (!sdev->dspless_mode_selected) {
		/* the chain DMA needs to be programmed before the DMAs */
		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_RUNNING);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: chain_dma_trigger failed: %d\n",
				__func__, ret);
			hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_STOP);
			return ret;
		}
		snd_hdac_ext_stream_start(sdw_bpt_stream);
	}

	return ret;
}

static int hda_sdw_bpt_dma_disable(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	int ret;

	if (!sdev->dspless_mode_selected) {
		snd_hdac_ext_stream_clear(sdw_bpt_stream);

		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_PAUSED);
		if (ret < 0)
			dev_err(sdev->dev, "%s: chain_dma_trigger PIPE_PAUSED failed: %d\n",
				__func__, ret);
	}

	ret = hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_STOP);
	if (ret < 0)
		dev_err(sdev->dev, "%s: SDW BPT DMA trigger stop failed\n", __func__);

	return ret;
}

#define FIFO_ALIGNMENT	64

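/*
 * Example: a 2-channel transfer (dma_bandwidth up to 12.288 Mbits/s) uses an
 * 8-byte data block and the 64-byte FIFO alignment dominates, while 3 channels
 * yield lcm(12, 64) = 192 bytes.
 */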
unsigned int hda_sdw_bpt_get_buf_size_alignment(unsigned int dma_bandwidth)
{
	unsigned int num_channels = DIV_ROUND_UP(dma_bandwidth, BPT_FREQUENCY * 32);
	unsigned int data_block = num_channels * 4;
	unsigned int alignment = lcm(data_block, FIFO_ALIGNMENT);

	return alignment;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_get_buf_size_alignment, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

int hda_sdw_bpt_open(struct device *dev, int link_id, struct hdac_ext_stream **bpt_tx_stream,
		     struct snd_dma_buffer *dmab_tx_bdl, u32 bpt_tx_num_bytes,
		     u32 tx_dma_bandwidth, struct hdac_ext_stream **bpt_rx_stream,
		     struct snd_dma_buffer *dmab_rx_bdl, u32 bpt_rx_num_bytes,
		     u32 rx_dma_bandwidth)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	unsigned int num_channels_tx;
	unsigned int num_channels_rx;
	int ret1;
	int ret;

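	/* each BPT channel carries 32-bit samples at 192 kHz, i.e. 6.144 Mbits/s */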
	num_channels_tx = DIV_ROUND_UP(tx_dma_bandwidth, BPT_FREQUENCY * 32);

	ret = hda_sdw_bpt_dma_prepare(dev, bpt_tx_stream, dmab_tx_bdl, bpt_tx_num_bytes,
				      num_channels_tx, SNDRV_PCM_STREAM_PLAYBACK);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_prepare failed for TX: %d\n",
			__func__, ret);
		return ret;
	}

	num_channels_rx = DIV_ROUND_UP(rx_dma_bandwidth, BPT_FREQUENCY * 32);

	ret = hda_sdw_bpt_dma_prepare(dev, bpt_rx_stream, dmab_rx_bdl, bpt_rx_num_bytes,
				      num_channels_rx, SNDRV_PCM_STREAM_CAPTURE);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_prepare failed for RX: %d\n",
			__func__, ret);

		ret1 = hda_sdw_bpt_dma_deprepare(dev, *bpt_tx_stream, dmab_tx_bdl);
		if (ret1 < 0)
			dev_err(dev, "%s: hda_sdw_bpt_dma_deprepare failed for TX: %d\n",
				__func__, ret1);
		return ret;
	}

	/* we need to map the channels in PCMSyCM registers */
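	/*
	 * The PCMSyCM registers route HDA stream channels to SoundWire PDIs:
	 * PDI0 carries the BPT TX (playback) data and PDI1 the RX (capture)
	 * data.
	 */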
	ret = hdac_bus_eml_sdw_map_stream_ch(sof_to_bus(sdev), link_id,
					     0, /* cpu_dai->id -> PDI0 */
					     GENMASK(num_channels_tx - 1, 0),
					     hdac_stream(*bpt_tx_stream)->stream_tag,
					     SNDRV_PCM_STREAM_PLAYBACK);
	if (ret < 0) {
		dev_err(dev, "%s: hdac_bus_eml_sdw_map_stream_ch failed for TX: %d\n",
			__func__, ret);
		goto close;
	}

	ret = hdac_bus_eml_sdw_map_stream_ch(sof_to_bus(sdev), link_id,
					     1, /* cpu_dai->id -> PDI1 */
					     GENMASK(num_channels_rx - 1, 0),
					     hdac_stream(*bpt_rx_stream)->stream_tag,
					     SNDRV_PCM_STREAM_CAPTURE);
	if (!ret)
		return 0;

	dev_err(dev, "%s: hdac_bus_eml_sdw_map_stream_ch failed for RX: %d\n",
		__func__, ret);

close:
	ret1 = hda_sdw_bpt_close(dev, *bpt_tx_stream, dmab_tx_bdl, *bpt_rx_stream, dmab_rx_bdl);
	if (ret1 < 0)
		dev_err(dev, "%s: hda_sdw_bpt_close failed: %d\n",
			__func__, ret1);

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_open, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

int hda_sdw_bpt_send_async(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
			   struct hdac_ext_stream *bpt_rx_stream)
{
	int ret1;
	int ret;

	ret = hda_sdw_bpt_dma_enable(dev, bpt_tx_stream);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_enable failed for TX: %d\n",
			__func__, ret);
		return ret;
	}

	ret = hda_sdw_bpt_dma_enable(dev, bpt_rx_stream);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_enable failed for RX: %d\n",
			__func__, ret);

		ret1 = hda_sdw_bpt_dma_disable(dev, bpt_tx_stream);
		if (ret1 < 0)
			dev_err(dev, "%s: hda_sdw_bpt_dma_disable failed for TX: %d\n",
				__func__, ret1);
	}

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_send_async, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

/*
 * 3s is several orders of magnitude larger than what is needed for a
 * typical firmware download.
 */
#define HDA_BPT_IOC_TIMEOUT_MS 3000

int hda_sdw_bpt_wait(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
		     struct hdac_ext_stream *bpt_rx_stream)
{
	struct sof_intel_hda_stream *hda_tx_stream;
	struct sof_intel_hda_stream *hda_rx_stream;
	snd_pcm_uframes_t tx_position;
	snd_pcm_uframes_t rx_position;
	unsigned long time_tx_left;
	unsigned long time_rx_left;
	int ret = 0;
	int ret1;
	int i;

	hda_tx_stream = container_of(bpt_tx_stream, struct sof_intel_hda_stream, hext_stream);
	hda_rx_stream = container_of(bpt_rx_stream, struct sof_intel_hda_stream, hext_stream);

	time_tx_left = wait_for_completion_timeout(&hda_tx_stream->ioc,
						   msecs_to_jiffies(HDA_BPT_IOC_TIMEOUT_MS));
	if (!time_tx_left) {
		tx_position = hda_dsp_stream_get_position(hdac_stream(bpt_tx_stream),
							  SNDRV_PCM_STREAM_PLAYBACK, false);
		dev_err(dev, "%s: SDW BPT TX DMA did not complete: %ld\n",
			__func__, tx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* Make sure the DMA is flushed */
	i = 0;
	do {
		tx_position = hda_dsp_stream_get_position(hdac_stream(bpt_tx_stream),
							  SNDRV_PCM_STREAM_PLAYBACK, false);
		usleep_range(1000, 1010);
		i++;
	} while (tx_position && i < HDA_BPT_IOC_TIMEOUT_MS);
	if (tx_position) {
		dev_err(dev, "%s: SDW BPT TX DMA position %ld was not cleared\n",
			__func__, tx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* the wait should be minimal here */
	time_rx_left = wait_for_completion_timeout(&hda_rx_stream->ioc,
						   msecs_to_jiffies(HDA_BPT_IOC_TIMEOUT_MS));
	if (!time_rx_left) {
		rx_position = hda_dsp_stream_get_position(hdac_stream(bpt_rx_stream),
							  SNDRV_PCM_STREAM_CAPTURE, false);
		dev_err(dev, "%s: SDW BPT RX DMA did not complete: %ld\n",
			__func__, rx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* Make sure the DMA is flushed */
	i = 0;
	do {
		rx_position = hda_dsp_stream_get_position(hdac_stream(bpt_rx_stream),
							  SNDRV_PCM_STREAM_CAPTURE, false);
		usleep_range(1000, 1010);
		i++;
	} while (rx_position && i < HDA_BPT_IOC_TIMEOUT_MS);
	if (rx_position) {
		dev_err(dev, "%s: SDW BPT RX DMA position %ld was not cleared\n",
			__func__, rx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

dma_disable:
	ret1 = hda_sdw_bpt_dma_disable(dev, bpt_rx_stream);
	if (!ret)
		ret = ret1;

	ret1 = hda_sdw_bpt_dma_disable(dev, bpt_tx_stream);
	if (!ret)
		ret = ret1;

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_wait, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

int hda_sdw_bpt_close(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
		      struct snd_dma_buffer *dmab_tx_bdl, struct hdac_ext_stream *bpt_rx_stream,
		      struct snd_dma_buffer *dmab_rx_bdl)
{
	int ret;
	int ret1;

	ret = hda_sdw_bpt_dma_deprepare(dev, bpt_rx_stream, dmab_rx_bdl);

	ret1 = hda_sdw_bpt_dma_deprepare(dev, bpt_tx_stream, dmab_tx_bdl);
	if (!ret)
		ret = ret1;

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_close, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");
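
/*
 * Typical caller flow (illustrative sketch only, not lifted from a specific
 * caller; tx_bytes/rx_bytes and the bandwidth values are assumed to come from
 * the SoundWire BPT message being transferred):
 *
 *	ret = hda_sdw_bpt_open(dev, link_id, &tx_stream, &dmab_tx, tx_bytes,
 *			       tx_bw, &rx_stream, &dmab_rx, rx_bytes, rx_bw);
 *	if (!ret)
 *		ret = hda_sdw_bpt_send_async(dev, tx_stream, rx_stream);
 *	if (!ret)
 *		ret = hda_sdw_bpt_wait(dev, tx_stream, rx_stream);
 *	hda_sdw_bpt_close(dev, tx_stream, &dmab_tx, rx_stream, &dmab_rx);
 */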

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("SOF helpers for HDaudio SoundWire BPT");
MODULE_IMPORT_NS("SND_SOC_SOF_INTEL_HDA_COMMON");
MODULE_IMPORT_NS("SND_SOC_SOF_HDA_MLINK");