// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license.  When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2025 Intel Corporation.
//

/*
 * Hardware interface for SoundWire BPT support with HDA DMA
 */

#include <linux/lcm.h>
#include <sound/hdaudio_ext.h>
#include <sound/hda-mlink.h>
#include <sound/hda-sdw-bpt.h>
#include <sound/sof.h>
#include <sound/sof/ipc4/header.h>
#include "../ops.h"
#include "../sof-priv.h"
#include "../ipc4-priv.h"
#include "hda.h"

#define BPT_FREQUENCY		192000 /* the max rate defined in rate_bits[] in hdac_device.c */
#define BPT_MULTIPLIER		((BPT_FREQUENCY / 48000) - 1)
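/*
 * BPT_MULTIPLIER: the HDA stream format MULT field encodes the rate
 * multiplier minus one, so 192 kHz expressed as 48 kHz x 4 is programmed
 * as the value 3.
 */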
#define BPT_CHAIN_DMA_FIFO_MS	10
/*
 * This routine is directly inspired by sof_ipc4_chain_dma_trigger(),
 * with major simplifications since there are no pipelines defined
 * and no dependency on ALSA hw_params
 */
static int chain_dma_trigger(struct snd_sof_dev *sdev, unsigned int stream_tag,
			     int direction, int state)
{
	struct sof_ipc4_fw_data *ipc4_data = sdev->private;
	bool allocate, enable, set_fifo_size;
	struct sof_ipc4_msg msg = {{ 0 }};
	int dma_id;

	if (sdev->pdata->ipc_type != SOF_IPC_TYPE_4)
		return -EOPNOTSUPP;

	switch (state) {
	case SOF_IPC4_PIPE_RUNNING: /* Allocate and start the chain */
		allocate = true;
		enable = true;
		set_fifo_size = true;
		break;
	case SOF_IPC4_PIPE_PAUSED: /* Stop the chain */
		allocate = true;
		enable = false;
		set_fifo_size = false;
		break;
	case SOF_IPC4_PIPE_RESET: /* Deallocate chain resources and remove the chain */
		allocate = false;
		enable = false;
		set_fifo_size = false;
		break;
	default:
		dev_err(sdev->dev, "Unexpected state %d\n", state);
		return -EINVAL;
	}

	msg.primary = SOF_IPC4_MSG_TYPE_SET(SOF_IPC4_GLB_CHAIN_DMA);
	msg.primary |= SOF_IPC4_MSG_DIR(SOF_IPC4_MSG_REQUEST);
	msg.primary |= SOF_IPC4_MSG_TARGET(SOF_IPC4_FW_GEN_MSG);

	/* for BPT/BRA we can use the same stream tag for host and link */
	dma_id = stream_tag - 1;
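	/*
	 * In the firmware DMA index space, capture DMAs follow all playback
	 * DMAs, hence the num_playback_streams offset below.
	 */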
	if (direction == SNDRV_PCM_STREAM_CAPTURE)
		dma_id += ipc4_data->num_playback_streams;

	msg.primary |=  SOF_IPC4_GLB_CHAIN_DMA_HOST_ID(dma_id);
	msg.primary |=  SOF_IPC4_GLB_CHAIN_DMA_LINK_ID(dma_id);

	/* For BPT/BRA we use 32 bits so SCS is not set */

	/* CHAIN DMA needs at least 2ms */
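	/*
	 * FIFO size in bytes: samples per ms at BPT_FREQUENCY, times the FIFO
	 * depth in ms, times 4 bytes per 32-bit sample (192 * 10 * 4 = 7680).
	 */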
	if (set_fifo_size)
		msg.extension |=  SOF_IPC4_GLB_EXT_CHAIN_DMA_FIFO_SIZE(BPT_FREQUENCY / 1000 *
								       BPT_CHAIN_DMA_FIFO_MS *
								       sizeof(u32));

	if (allocate)
		msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_ALLOCATE_MASK;

	if (enable)
		msg.primary |= SOF_IPC4_GLB_CHAIN_DMA_ENABLE_MASK;

	return sof_ipc_tx_message_no_reply(sdev->ipc, &msg, 0);
}

static int hda_sdw_bpt_dma_prepare(struct device *dev, struct hdac_ext_stream **sdw_bpt_stream,
				   struct snd_dma_buffer *dmab_bdl, u32 bpt_num_bytes,
				   unsigned int num_channels, int direction)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_ext_stream *bpt_stream;
	unsigned int format = HDA_CL_STREAM_FORMAT;

	/*
	 * The baseline format needs to be adjusted to the bandwidth
	 * requirements: set the channel count and raise the rate multiplier
	 * so the stream runs at BPT_FREQUENCY.
	 */
	format |= (num_channels - 1);
	format |= BPT_MULTIPLIER << AC_FMT_MULT_SHIFT;

	dev_dbg(dev, "direction %d format_val %#x\n", direction, format);

	bpt_stream = hda_cl_prepare(dev, format, bpt_num_bytes, dmab_bdl, false, direction, false);
	if (IS_ERR(bpt_stream)) {
		dev_err(sdev->dev, "%s: SDW BPT DMA prepare failed: dir %d\n",
			__func__, direction);
		return PTR_ERR(bpt_stream);
	}
	*sdw_bpt_stream = bpt_stream;

	if (!sdev->dspless_mode_selected) {
		struct hdac_stream *hstream;
		u32 mask;

		/* decouple host and link DMA if the DSP is used */
		hstream = &bpt_stream->hstream;
		mask = BIT(hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, mask, mask);

		snd_hdac_ext_stream_reset(bpt_stream);

		snd_hdac_ext_stream_setup(bpt_stream, format);
	}

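	/*
	 * For playback, the stream tag must also be programmed on the HDaudio
	 * multi-link so the link DMA is paired with the host DMA; there is no
	 * equivalent step for capture.
	 */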
	if (hdac_stream(bpt_stream)->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		struct hdac_ext_link *hlink;
		int stream_tag;

		stream_tag = hdac_stream(bpt_stream)->stream_tag;
		hlink = hdac_bus_eml_sdw_get_hlink(bus);

		snd_hdac_ext_bus_link_set_stream_id(hlink, stream_tag);
	}
	return 0;
}

static int hda_sdw_bpt_dma_deprepare(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream,
				     struct snd_dma_buffer *dmab_bdl)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	struct hdac_stream *hstream;
	u32 mask;
	int ret;

	ret = hda_cl_cleanup(sdev->dev, dmab_bdl, false, sdw_bpt_stream);
	if (ret < 0) {
		dev_err(sdev->dev, "%s: SDW BPT DMA cleanup failed\n",
			__func__);
		return ret;
	}

	if (hdac_stream(sdw_bpt_stream)->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		struct hdac_ext_link *hlink;
		int stream_tag;

		stream_tag = hdac_stream(sdw_bpt_stream)->stream_tag;
		hlink = hdac_bus_eml_sdw_get_hlink(bus);

		snd_hdac_ext_bus_link_clear_stream_id(hlink, stream_tag);
	}

	if (!sdev->dspless_mode_selected) {
		/* Release CHAIN_DMA resources */
		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_RESET);
		if (ret < 0)
			dev_err(sdev->dev, "%s: chain_dma_trigger PIPE_RESET failed: %d\n",
				__func__, ret);

		/* couple host and link DMA */
		hstream = &sdw_bpt_stream->hstream;
		mask = BIT(hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, mask, 0);
	}

	return 0;
}

static int hda_sdw_bpt_dma_enable(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	int ret;

	ret = hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_START);
	if (ret < 0)
		dev_err(sdev->dev, "%s: SDW BPT DMA trigger start failed\n", __func__);

	if (!sdev->dspless_mode_selected) {
		/* the chain DMA needs to be programmed before the DMAs */
		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_RUNNING);
		if (ret < 0) {
			dev_err(sdev->dev, "%s: chain_dma_trigger failed: %d\n",
				__func__, ret);
			hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_STOP);
			return ret;
		}
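		/*
		 * Host and link DMA are decoupled here, so hda_cl_trigger()
		 * only started the host side; start the link DMA explicitly.
		 */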
		snd_hdac_ext_stream_start(sdw_bpt_stream);
	}

	return ret;
}

static int hda_sdw_bpt_dma_disable(struct device *dev, struct hdac_ext_stream *sdw_bpt_stream)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	int ret;

	if (!sdev->dspless_mode_selected) {
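		/* stop the link DMA before pausing the chain DMA in the firmware */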
		snd_hdac_ext_stream_clear(sdw_bpt_stream);

		ret = chain_dma_trigger(sdev, hdac_stream(sdw_bpt_stream)->stream_tag,
					hdac_stream(sdw_bpt_stream)->direction,
					SOF_IPC4_PIPE_PAUSED);
		if (ret < 0)
			dev_err(sdev->dev, "%s: chain_dma_trigger PIPE_PAUSED failed: %d\n",
				__func__, ret);
	}

	ret = hda_cl_trigger(sdev->dev, sdw_bpt_stream, SNDRV_PCM_TRIGGER_STOP);
	if (ret < 0)
		dev_err(sdev->dev, "%s: SDW BPT DMA trigger stop failed\n", __func__);

	return ret;
}

#define FIFO_ALIGNMENT	64

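/*
 * The BPT buffer size must be a multiple of both the audio frame size
 * (num_channels * 4 bytes) and the 64-byte FIFO alignment, hence the least
 * common multiple of the two.
 */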
unsigned int hda_sdw_bpt_get_buf_size_alignment(unsigned int dma_bandwidth)
{
	unsigned int num_channels = DIV_ROUND_UP(dma_bandwidth, BPT_FREQUENCY * 32);
	unsigned int data_block = num_channels * 4;
	unsigned int alignment = lcm(data_block, FIFO_ALIGNMENT);

	return alignment;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_get_buf_size_alignment, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

int hda_sdw_bpt_open(struct device *dev, int link_id, struct hdac_ext_stream **bpt_tx_stream,
		     struct snd_dma_buffer *dmab_tx_bdl, u32 bpt_tx_num_bytes,
		     u32 tx_dma_bandwidth, struct hdac_ext_stream **bpt_rx_stream,
		     struct snd_dma_buffer *dmab_rx_bdl, u32 bpt_rx_num_bytes,
		     u32 rx_dma_bandwidth)
{
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
	unsigned int num_channels_tx;
	unsigned int num_channels_rx;
	int ret1;
	int ret;

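	/*
	 * Convert the requested DMA bandwidth (in bits per second) into the
	 * number of 32-bit channels needed at BPT_FREQUENCY frames per second.
	 */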
	num_channels_tx = DIV_ROUND_UP(tx_dma_bandwidth, BPT_FREQUENCY * 32);

	ret = hda_sdw_bpt_dma_prepare(dev, bpt_tx_stream, dmab_tx_bdl, bpt_tx_num_bytes,
				      num_channels_tx, SNDRV_PCM_STREAM_PLAYBACK);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_prepare failed for TX: %d\n",
			__func__, ret);
		return ret;
	}

	num_channels_rx = DIV_ROUND_UP(rx_dma_bandwidth, BPT_FREQUENCY * 32);

	ret = hda_sdw_bpt_dma_prepare(dev, bpt_rx_stream, dmab_rx_bdl, bpt_rx_num_bytes,
				      num_channels_rx, SNDRV_PCM_STREAM_CAPTURE);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_prepare failed for RX: %d\n",
			__func__, ret);

		ret1 = hda_sdw_bpt_dma_deprepare(dev, *bpt_tx_stream, dmab_tx_bdl);
		if (ret1 < 0)
			dev_err(dev, "%s: hda_sdw_bpt_dma_deprepare failed for TX: %d\n",
				__func__, ret1);
		return ret;
	}

	/* map the BPT stream channels to the SoundWire PDIs in the PCMSyCM registers */
	ret = hdac_bus_eml_sdw_map_stream_ch(sof_to_bus(sdev), link_id,
					     0, /* cpu_dai->id -> PDI0 */
					     GENMASK(num_channels_tx - 1, 0),
					     hdac_stream(*bpt_tx_stream)->stream_tag,
					     SNDRV_PCM_STREAM_PLAYBACK);
	if (ret < 0) {
		dev_err(dev, "%s: hdac_bus_eml_sdw_map_stream_ch failed for TX: %d\n",
			__func__, ret);
		goto close;
	}

	ret = hdac_bus_eml_sdw_map_stream_ch(sof_to_bus(sdev), link_id,
					     1, /* cpu_dai->id -> PDI1 */
					     GENMASK(num_channels_rx - 1, 0),
					     hdac_stream(*bpt_rx_stream)->stream_tag,
					     SNDRV_PCM_STREAM_CAPTURE);
	if (!ret)
		return 0;

	dev_err(dev, "%s: hdac_bus_eml_sdw_map_stream_ch failed for RX: %d\n",
		__func__, ret);

close:
	ret1 = hda_sdw_bpt_close(dev, *bpt_tx_stream, dmab_tx_bdl, *bpt_rx_stream, dmab_rx_bdl);
	if (ret1 < 0)
		dev_err(dev, "%s: hda_sdw_bpt_close failed: %d\n",
			__func__, ret1);

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_open, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

int hda_sdw_bpt_send_async(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
			   struct hdac_ext_stream *bpt_rx_stream)
{
	int ret1;
	int ret;

	ret = hda_sdw_bpt_dma_enable(dev, bpt_tx_stream);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_enable failed for TX: %d\n",
			__func__, ret);
		return ret;
	}

	ret = hda_sdw_bpt_dma_enable(dev, bpt_rx_stream);
	if (ret < 0) {
		dev_err(dev, "%s: hda_sdw_bpt_dma_enable failed for RX: %d\n",
			__func__, ret);

		ret1 = hda_sdw_bpt_dma_disable(dev, bpt_tx_stream);
		if (ret1 < 0)
			dev_err(dev, "%s: hda_sdw_bpt_dma_disable failed for TX: %d\n",
				__func__, ret1);
	}

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_send_async, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

/*
 * 3s is several orders of magnitude larger than what is needed for a
 * typical firmware download.
 */
#define HDA_BPT_IOC_TIMEOUT_MS 3000

int hda_sdw_bpt_wait(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
		     struct hdac_ext_stream *bpt_rx_stream)
{
	struct sof_intel_hda_stream *hda_tx_stream;
	struct sof_intel_hda_stream *hda_rx_stream;
	snd_pcm_uframes_t tx_position;
	snd_pcm_uframes_t rx_position;
	unsigned long time_tx_left;
	unsigned long time_rx_left;
	int ret = 0;
	int ret1;
	int i;

	hda_tx_stream = container_of(bpt_tx_stream, struct sof_intel_hda_stream, hext_stream);
	hda_rx_stream = container_of(bpt_rx_stream, struct sof_intel_hda_stream, hext_stream);

	time_tx_left = wait_for_completion_timeout(&hda_tx_stream->ioc,
						   msecs_to_jiffies(HDA_BPT_IOC_TIMEOUT_MS));
	if (!time_tx_left) {
		tx_position = hda_dsp_stream_get_position(hdac_stream(bpt_tx_stream),
							  SNDRV_PCM_STREAM_PLAYBACK, false);
		dev_err(dev, "%s: SDW BPT TX DMA did not complete: %ld\n",
			__func__, tx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* Make sure the DMA is flushed */
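	/*
	 * Poll the DMA position until it reads zero, sleeping ~1 ms per
	 * iteration and giving up after HDA_BPT_IOC_TIMEOUT_MS attempts.
	 */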
	i = 0;
	do {
		tx_position = hda_dsp_stream_get_position(hdac_stream(bpt_tx_stream),
							  SNDRV_PCM_STREAM_PLAYBACK, false);
		usleep_range(1000, 1010);
		i++;
	} while (tx_position && i < HDA_BPT_IOC_TIMEOUT_MS);
	if (tx_position) {
		dev_err(dev, "%s: SDW BPT TX DMA position %ld was not cleared\n",
			__func__, tx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* the wait should be minimal here */
	time_rx_left = wait_for_completion_timeout(&hda_rx_stream->ioc,
						   msecs_to_jiffies(HDA_BPT_IOC_TIMEOUT_MS));
	if (!time_rx_left) {
		rx_position = hda_dsp_stream_get_position(hdac_stream(bpt_rx_stream),
							  SNDRV_PCM_STREAM_CAPTURE, false);
		dev_err(dev, "%s: SDW BPT RX DMA did not complete: %ld\n",
			__func__, rx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

	/* Make sure the DMA is flushed */
	i = 0;
	do {
		rx_position = hda_dsp_stream_get_position(hdac_stream(bpt_rx_stream),
							  SNDRV_PCM_STREAM_CAPTURE, false);
		usleep_range(1000, 1010);
		i++;
	} while (rx_position && i < HDA_BPT_IOC_TIMEOUT_MS);
	if (rx_position) {
		dev_err(dev, "%s: SDW BPT RX DMA position %ld was not cleared\n",
			__func__, rx_position);
		ret = -ETIMEDOUT;
		goto dma_disable;
	}

dma_disable:
	ret1 = hda_sdw_bpt_dma_disable(dev, bpt_rx_stream);
	if (!ret)
		ret = ret1;

	ret1 = hda_sdw_bpt_dma_disable(dev, bpt_tx_stream);
	if (!ret)
		ret = ret1;

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_wait, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

int hda_sdw_bpt_close(struct device *dev, struct hdac_ext_stream *bpt_tx_stream,
		      struct snd_dma_buffer *dmab_tx_bdl, struct hdac_ext_stream *bpt_rx_stream,
		      struct snd_dma_buffer *dmab_rx_bdl)
{
	int ret;
	int ret1;

	ret = hda_sdw_bpt_dma_deprepare(dev, bpt_rx_stream, dmab_rx_bdl);

	ret1 = hda_sdw_bpt_dma_deprepare(dev, bpt_tx_stream, dmab_tx_bdl);
	if (!ret)
		ret = ret1;

	return ret;
}
EXPORT_SYMBOL_NS(hda_sdw_bpt_close, "SND_SOC_SOF_INTEL_HDA_SDW_BPT");

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("SOF helpers for HDaudio SoundWire BPT");
MODULE_IMPORT_NS("SND_SOC_SOF_INTEL_HDA_COMMON");
MODULE_IMPORT_NS("SND_SOC_SOF_HDA_MLINK");