xref: /linux/sound/soc/sof/intel/hda-stream.c (revision 9e3d4f794cbe9a4e286b3052cb97908005807aee)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license.  When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2018 Intel Corporation
7 //
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 //	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 //	    Rander Wang <rander.wang@intel.com>
11 //          Keyon Jie <yang.jie@linux.intel.com>
12 //
13 
14 /*
15  * Hardware interface for generic Intel audio DSP HDA IP
16  */
17 
18 #include <sound/hdaudio_ext.h>
19 #include <sound/hda_register.h>
20 #include <sound/sof.h>
21 #include <trace/events/sof_intel.h>
22 #include "../ops.h"
23 #include "../sof-audio.h"
24 #include "../ipc4-priv.h"
25 #include "hda.h"
26 
/*
 * Module parameter selecting how the stream DMA position is obtained
 * (see SOF_HDA_POSITION_QUIRK_* in hda.h); defaults to reading the
 * DPIB registers. Read-only at runtime (perm 0444).
 */
int sof_hda_position_quirk = SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS;
module_param_named(position_quirk, sof_hda_position_quirk, int, 0444);
MODULE_PARM_DESC(position_quirk, "SOF HDaudio position quirk");
EXPORT_SYMBOL_NS(sof_hda_position_quirk, "SND_SOC_SOF_INTEL_HDA_COMMON");

/* LTR guardband value (us) programmed during FW boot, per HW recommendation */
#define HDA_LTRP_GB_VALUE_US	95
33 
34 static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
35 {
36 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
37 		return "Playback";
38 	else
39 		return "Capture";
40 }
41 
42 static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
43 {
44 	struct snd_soc_pcm_runtime *rtd;
45 
46 	if (hstream->substream)
47 		rtd = snd_soc_substream_to_rtd(hstream->substream);
48 	else if (hstream->cstream)
49 		rtd = hstream->cstream->private_data;
50 	else
51 		/* Non audio DMA user, like dma-trace */
52 		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
53 				 hda_hstream_direction_str(hstream),
54 				 hstream->stream_tag);
55 
56 	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
57 			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
58 			 hstream->stream_tag);
59 }
60 
/*
 * set up one of BDL entries for a stream
 *
 * Appends Buffer Descriptor List entries describing @size bytes of @dmab
 * starting at @offset, advancing *@bdlp past the entries written and
 * incrementing hstream->frags per entry. When @ioc is set, the IOC
 * (interrupt on completion) flag is set on the final entry only.
 *
 * Returns the new buffer offset on success, -EINVAL if the entry budget
 * (HDA_DSP_MAX_BDL_ENTRIES) is exhausted.
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		/* DMA address of the current position in the (possibly SG) buffer */
		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size: limited to the contiguous chunk at @offset */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}
107 
/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 *
 * Splits hstream->bufsize into `periods` of hstream->period_bytes each
 * (plus a trailing partial entry when the buffer is not an exact multiple)
 * and programs one hda_setup_bdle() call per period into hstream->bdl.
 *
 * Returns the total number of bytes described (== bufsize) on success,
 * or a negative error code propagated from hda_setup_bdle().
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes: %#x, bufsize: %#x\n", period_bytes,
		hstream->bufsize);

	if (!period_bytes) {
		unsigned int chunk_size;

		chunk_size = snd_sgbuf_get_chunk_size(dmab, 0, hstream->bufsize);

		period_bytes = hstream->bufsize;

		/*
		 * HDA spec demands that the LVI value must be at least one
		 * before the DMA operation can begin. This means that there
		 * must be at least two BDLE present for the transfer.
		 *
		 * If the buffer is not a single continuous area then the
		 * hda_setup_bdle() will create multiple BDLEs for each segment.
		 * If the memory is a single continuous area, force it to be
		 * split into two 'periods', otherwise the transfer will be
		 * split to multiple BDLE for each chunk in hda_setup_bdle()
		 *
		 * Note: period_bytes == 0 can only happen for firmware or
		 * library loading. The data size is 4K aligned, which ensures
		 * that the second chunk's start address will be 128-byte
		 * aligned.
		 */
		if (chunk_size == hstream->bufsize)
			period_bytes /= 2;
	}

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods: %d\n", periods);

	/* a non-zero remainder gets its own (smaller) final BDL entry */
	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC if don't use position IPC
	 * and period_wakeup needed.
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}
186 
187 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
188 			       struct hdac_ext_stream *hext_stream,
189 			       int enable, u32 size)
190 {
191 	struct hdac_stream *hstream = &hext_stream->hstream;
192 	u32 mask;
193 
194 	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
195 		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
196 		return -EINVAL;
197 	}
198 
199 	mask = (1 << hstream->index);
200 
201 	/* enable/disable SPIB for the stream */
202 	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
203 				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
204 				enable << hstream->index);
205 
206 	/* set the SPIB value */
207 	sof_io_write(sdev, hstream->spib_addr, size);
208 
209 	return 0;
210 }
211 
/*
 * get next unused stream
 *
 * Scans the bus stream list under reg_lock for a closed stream in the
 * requested @direction whose host DMA channel is not reserved, marks it
 * opened and stores @flags on it. On pre-ACE hardware, additionally
 * disables DMI Link L1 when the stream is not L1-compatible.
 *
 * Returns the claimed stream, or NULL if none is free.
 */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	const struct sof_intel_dsp_desc *chip_info =  get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			/* claim it while still holding reg_lock */
			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found ? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n", snd_pcm_direction_name(direction));
		return hext_stream;
	}

	/* hda_stream is valid here: it was set in the iteration that found hext_stream */
	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}
266 
/*
 * free a stream
 *
 * Marks the stream matching @direction/@stream_tag as closed and, while
 * walking the list, determines whether every remaining open stream is
 * DMI L1 compatible; if so (on pre-ACE hardware) re-enables DMI Link L1.
 *
 * Returns 0 on success, -ENODEV if no matching open stream was found.
 */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	const struct sof_intel_dsp_desc *chip_info =  get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			/* another open stream forbids L1 entry */
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
		hda->l1_disabled = false;
	}

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}
317 
/*
 * Put the stream descriptor through a full enter/exit reset cycle
 * (SDxCTL.SRST), polling the bit after each transition as required by the
 * HDA spec. Returns 0 on success, -ETIMEDOUT if either transition is not
 * acknowledged by the hardware.
 */
static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	/* wait for the hardware to latch the reset (CRST reads back as 1) */
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}
356 
/*
 * Start or stop the host DMA for @hext_stream according to the ALSA
 * trigger @cmd. PAUSE_PUSH/RELEASE are only handled as stop/start in
 * DSP-less mode; with a DSP the pause is handled via IPC instead, so
 * those commands deliberately fall through to no-ops here.
 *
 * Returns 0 on success, -EINVAL for unknown commands, or a negative
 * errno if the RUN bit does not reach the expected state in time.
 */
int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_START:
		if (hstream->running)
			break;

		/* enable the stream interrupt */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		/* set RUN and unmask stream interrupts */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		/* wait for the hardware to acknowledge RUN */
		ret = snd_sof_dsp_read_poll_timeout(sdev,
					HDA_DSP_HDA_BAR,
					sd_offset, run,
					((run &	dma_start) == dma_start),
					HDA_DSP_REG_POLL_INTERVAL_US,
					HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
		/* clear RUN and mask stream interrupts */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		/* wait for the DMA engine to actually stop */
		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						sd_offset, run,
						!(run &	dma_start),
						HDA_DSP_REG_POLL_INTERVAL_US,
						HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			/* clear any pending stream status bits */
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}
442 
443 /* minimal recommended programming for ICCMAX stream */
444 int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
445 				    struct snd_dma_buffer *dmab,
446 				    struct snd_pcm_hw_params *params)
447 {
448 	struct hdac_stream *hstream = &hext_stream->hstream;
449 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
450 	int ret;
451 	u32 mask = 0x1 << hstream->index;
452 
453 	if (!hext_stream) {
454 		dev_err(sdev->dev, "error: no stream available\n");
455 		return -ENODEV;
456 	}
457 
458 	if (!dmab) {
459 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
460 		return -ENODEV;
461 	}
462 
463 	if (hstream->posbuf)
464 		*hstream->posbuf = 0;
465 
466 	/* reset BDL address */
467 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
468 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
469 			  0x0);
470 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
471 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
472 			  0x0);
473 
474 	hstream->frags = 0;
475 
476 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
477 	if (ret < 0) {
478 		dev_err(sdev->dev, "error: set up of BDL failed\n");
479 		return ret;
480 	}
481 
482 	/* program BDL address */
483 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
484 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
485 			  (u32)hstream->bdl.addr);
486 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
487 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
488 			  upper_32_bits(hstream->bdl.addr));
489 
490 	/* program cyclic buffer length */
491 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
492 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
493 			  hstream->bufsize);
494 
495 	/* program last valid index */
496 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
497 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
498 				0xffff, (hstream->frags - 1));
499 
500 	/* decouple host and link DMA, enable DSP features */
501 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
502 				mask, mask);
503 
504 	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
505 	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
506 			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);
507 
508 	/* start DMA */
509 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
510 				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);
511 
512 	return 0;
513 }
514 
515 /*
516  * prepare for common hdac registers settings, for both code loader
517  * and normal stream.
518  */
519 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
520 			     struct hdac_ext_stream *hext_stream,
521 			     struct snd_dma_buffer *dmab,
522 			     struct snd_pcm_hw_params *params)
523 {
524 	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
525 	struct hdac_bus *bus = sof_to_bus(sdev);
526 	struct hdac_stream *hstream;
527 	int sd_offset, ret;
528 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
529 	u32 mask;
530 	u32 run;
531 
532 	if (!hext_stream) {
533 		dev_err(sdev->dev, "error: no stream available\n");
534 		return -ENODEV;
535 	}
536 
537 	if (!dmab) {
538 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
539 		return -ENODEV;
540 	}
541 
542 	hstream = &hext_stream->hstream;
543 	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
544 	mask = BIT(hstream->index);
545 
546 	/* decouple host and link DMA if the DSP is used */
547 	if (!sdev->dspless_mode_selected)
548 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
549 					mask, mask);
550 
551 	/* clear stream status */
552 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
553 				SOF_HDA_CL_DMA_SD_INT_MASK |
554 				SOF_HDA_SD_CTL_DMA_START, 0);
555 
556 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
557 					    sd_offset, run,
558 					    !(run & dma_start),
559 					    HDA_DSP_REG_POLL_INTERVAL_US,
560 					    HDA_DSP_STREAM_RUN_TIMEOUT);
561 
562 	if (ret < 0) {
563 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
564 
565 		dev_err(sdev->dev,
566 			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
567 			__func__, stream_name ? stream_name : "unknown stream");
568 		kfree(stream_name);
569 		return ret;
570 	}
571 
572 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
573 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
574 				SOF_HDA_CL_DMA_SD_INT_MASK,
575 				SOF_HDA_CL_DMA_SD_INT_MASK);
576 
577 	/* stream reset */
578 	ret = hda_dsp_stream_reset(sdev, hstream);
579 	if (ret < 0)
580 		return ret;
581 
582 	if (hstream->posbuf)
583 		*hstream->posbuf = 0;
584 
585 	/* reset BDL address */
586 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
587 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
588 			  0x0);
589 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
590 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
591 			  0x0);
592 
593 	/* clear stream status */
594 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
595 				SOF_HDA_CL_DMA_SD_INT_MASK |
596 				SOF_HDA_SD_CTL_DMA_START, 0);
597 
598 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
599 					    sd_offset, run,
600 					    !(run & dma_start),
601 					    HDA_DSP_REG_POLL_INTERVAL_US,
602 					    HDA_DSP_STREAM_RUN_TIMEOUT);
603 
604 	if (ret < 0) {
605 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
606 
607 		dev_err(sdev->dev,
608 			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
609 			__func__, stream_name ? stream_name : "unknown stream");
610 		kfree(stream_name);
611 		return ret;
612 	}
613 
614 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
615 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
616 				SOF_HDA_CL_DMA_SD_INT_MASK,
617 				SOF_HDA_CL_DMA_SD_INT_MASK);
618 
619 	hstream->frags = 0;
620 
621 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
622 	if (ret < 0) {
623 		dev_err(sdev->dev, "error: set up of BDL failed\n");
624 		return ret;
625 	}
626 
627 	/* program stream tag to set up stream descriptor for DMA */
628 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
629 				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
630 				hstream->stream_tag <<
631 				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
632 
633 	/* program cyclic buffer length */
634 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
635 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
636 			  hstream->bufsize);
637 
638 	/*
639 	 * Recommended hardware programming sequence for HDAudio DMA format
640 	 * on earlier platforms - this is not needed on newer platforms
641 	 *
642 	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
643 	 *    for corresponding stream index before the time of writing
644 	 *    format to SDxFMT register.
645 	 * 2. Write SDxFMT
646 	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
647 	 *    enable decoupled mode
648 	 */
649 
650 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
651 		/* couple host and link DMA, disable DSP features */
652 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
653 					mask, 0);
654 
655 	/* program stream format */
656 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
657 				sd_offset +
658 				SOF_HDA_ADSP_REG_SD_FORMAT,
659 				0xffff, hstream->format_val);
660 
661 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
662 		/* decouple host and link DMA, enable DSP features */
663 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
664 					mask, mask);
665 
666 	/* program last valid index */
667 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
668 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
669 				0xffff, (hstream->frags - 1));
670 
671 	/* program BDL address */
672 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
673 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
674 			  (u32)hstream->bdl.addr);
675 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
676 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
677 			  upper_32_bits(hstream->bdl.addr));
678 
679 	/* enable position buffer, if needed */
680 	if (bus->use_posbuf && bus->posbuf.addr &&
681 	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
682 	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
683 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
684 				  upper_32_bits(bus->posbuf.addr));
685 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
686 				  (u32)bus->posbuf.addr |
687 				  SOF_HDA_ADSP_DPLBASE_ENABLE);
688 	}
689 
690 	/* set interrupt enable bits */
691 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
692 				SOF_HDA_CL_DMA_SD_INT_MASK,
693 				SOF_HDA_CL_DMA_SD_INT_MASK);
694 
695 	/* read FIFO size */
696 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
697 		hstream->fifo_size =
698 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
699 					 sd_offset +
700 					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
701 		hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
702 		hstream->fifo_size += 1;
703 	} else {
704 		hstream->fifo_size = 0;
705 	}
706 
707 	return ret;
708 }
709 
/*
 * Release host-side resources for @substream's stream: reset the stream
 * descriptor, re-couple host/link DMA (DSP mode only, and only when the
 * link side is idle), disable SPIB and detach the substream pointer.
 * Returns 0 on success or the error from hda_dsp_stream_reset().
 */
int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							 struct hdac_ext_stream,
							 hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		/* guard releases reg_lock automatically at the end of this scope */
		guard(spinlock_irq)(&bus->reg_lock);

		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_hw_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
742 
743 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
744 {
745 	struct hdac_bus *bus = sof_to_bus(sdev);
746 	bool ret = false;
747 	u32 status;
748 
749 	/* The function can be called at irq thread, so use spin_lock_irq */
750 	guard(spinlock_irq)(&bus->reg_lock);
751 
752 	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
753 
754 	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);
755 
756 	/* if Register inaccessible, ignore it.*/
757 	if (status != 0xffffffff)
758 		ret = true;
759 
760 	return ret;
761 }
762 EXPORT_SYMBOL_NS(hda_dsp_check_stream_irq, "SND_SOC_SOF_INTEL_HDA_COMMON");
763 
764 static void
765 hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
766 {
767 	u64 buffer_size = hstream->bufsize;
768 	u64 prev_pos, pos, num_bytes;
769 
770 	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
771 	pos = hda_dsp_stream_get_position(hstream, direction, false);
772 
773 	if (pos < prev_pos)
774 		num_bytes = (buffer_size - prev_pos) +  pos;
775 	else
776 		num_bytes = pos - prev_pos;
777 
778 	hstream->curr_pos += num_bytes;
779 }
780 
/*
 * Service per-stream interrupts indicated by @status (one bit per stream
 * index): acknowledge the stream status bits, complete code-load/data
 * transfer waiters, and notify ALSA PCM/compress layers of elapsed
 * periods. Returns true if at least one opened stream had its bit set,
 * so the caller can re-scan for interrupts that raced with handling.
 * Caller must hold bus->reg_lock.
 */
static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			/* write-1-to-clear the pending status bits */
			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			/* notifications below only apply to running streams with IOC */
			if (!s->running)
				continue;
			if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;
			if (!s->substream && !s->cstream) {
				/*
				 * when no substream is found, the DMA may used for code loading
				 * or data transfers which can rely on wait_for_completion()
				 */
				struct sof_intel_hda_stream *hda_stream;
				struct hdac_ext_stream *hext_stream;

				hext_stream = stream_to_hdac_ext_stream(s);
				hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
							  hext_stream);

				complete(&hda_stream->ioc);
				continue;
			}

			/* Inform ALSA only if the IPC position is not used */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}
829 
830 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
831 {
832 	struct snd_sof_dev *sdev = context;
833 	struct hdac_bus *bus = sof_to_bus(sdev);
834 	bool active;
835 	u32 status;
836 	int i;
837 
838 	/*
839 	 * Loop 10 times to handle missed interrupts caused by
840 	 * unsolicited responses from the codec
841 	 */
842 	for (i = 0, active = true; i < 10 && active; i++) {
843 		guard(spinlock_irq)(&bus->reg_lock);
844 
845 		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
846 
847 		/* check streams */
848 		active = hda_dsp_stream_check(bus, status);
849 
850 		/* check and clear RIRB interrupt */
851 		if (status & AZX_INT_CTRL_EN) {
852 			active |= hda_codec_check_rirb_status(sdev);
853 		}
854 	}
855 
856 	return IRQ_HANDLED;
857 }
858 EXPORT_SYMBOL_NS(hda_dsp_stream_threaded_handler, "SND_SOC_SOF_INTEL_HDA_COMMON");
859 
/*
 * Discover the controller's stream layout from GCAP and create one
 * sof_intel_hda_stream per hardware stream: allocate the shared position
 * buffer and CORB/RIRB ring, then per stream the PPHC/PPLC/SPIB register
 * addresses, direction/stream tag and a BDL DMA buffer, linking each
 * stream into bus->stream_list.
 *
 * Returns 0 on success, -EINVAL for out-of-range stream counts, -ENOMEM
 * on allocation failure.
 *
 * NOTE(review): on mid-function failure the already-allocated DMA pages
 * are not freed here — presumably the caller unwinds via
 * hda_dsp_stream_free(); confirm against the probe error path.
 */
int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/* get stream count from GCAP */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture and playback streams */
	for (i = 0; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;
		/* signalled from hda_dsp_stream_check() when no substream is attached */
		init_completion(&hda_stream->ioc);

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->opened = false;
		hstream->running = false;

		/* capture streams come first in the index space, per HDA layout */
		if (i < num_capture) {
			hstream->stream_tag = i + 1;
			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
		} else {
			hstream->stream_tag = i - num_capture + 1;
			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
		}

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		/* each stream owns an 8-byte slot in the shared position buffer */
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	/* store stream count from GCAP required for CHAIN_DMA */
	if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) {
		struct sof_ipc4_fw_data *ipc4_data = sdev->private;

		ipc4_data->num_playback_streams = num_playback;
		ipc4_data->num_capture_streams = num_capture;
	}

	return 0;
}
EXPORT_SYMBOL_NS(hda_dsp_stream_init, "SND_SOC_SOF_INTEL_HDA_COMMON");
997 
/*
 * Tear down everything hda_dsp_stream_init() created: the shared position
 * buffer, the CORB/RIRB ring, and every stream's BDL buffer and backing
 * sof_intel_hda_stream allocation.
 */
void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

	/* free CORB/RIRB buffer - only used for HDaudio codecs */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);

	/* safe iteration: entries are deleted while walking */
	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		/* release early rather than waiting for device teardown */
		devm_kfree(sdev->dev, hda_stream);
	}
}
EXPORT_SYMBOL_NS(hda_dsp_stream_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
1027 
/*
 * hda_dsp_stream_get_position - read the current DMA position of a stream
 * @hstream: HDA stream to query
 * @direction: SNDRV_PCM_STREAM_PLAYBACK or SNDRV_PCM_STREAM_CAPTURE
 * @can_sleep: true if the caller context allows sleeping; enables the 20us
 *	       settle delay used by the legacy Skylake capture flow
 *
 * The position source (DPIB register vs. DDR position buffer) is selected
 * by the sof_hda_position_quirk module parameter. Returns 0 for positions
 * at or beyond the buffer size and for unsupported quirk values.
 */
snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow
		 * for capture and playback, the only information that matters is
		 * what traffic class is used, and on all SOF-enabled platforms
		 * only VC0 is supported so the work-around was likely not necessary
		 * and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, Use DPIB register from HDA space which
		 * reflects the actual data transferred.
		 * For Capture, Use the position buffer for pointer, as DPIB
		 * is not accurate enough, its update may be completed
		 * earlier than the data written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For capture stream, we need more workaround to fix the
			 * position incorrect issue:
			 *
			 * 1. Wait at least 20us before reading position buffer after
			 * the interrupt generated(IOC), to make sure position update
			 * happens on frame boundary i.e. 20.833uSec for 48KHz.
			 * 2. Perform a dummy Read to DPIB register to flush DMA
			 * position value.
			 * 3. Read the DMA Position from posbuf. Now the readback
			 * value should be >= period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			/* dummy read; return value intentionally discarded */
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	/* a position at or past the buffer size wraps around to 0 */
	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}
1113 EXPORT_SYMBOL_NS(hda_dsp_stream_get_position, "SND_SOC_SOF_INTEL_HDA_COMMON");
1114 
/* combine upper and lower 32-bit register halves into one 64-bit value */
#define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l))
1116 
1117 /**
1118  * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream
1119  * @sdev: SOF device
1120  * @component: ASoC component
1121  * @substream: PCM substream
1122  *
1123  * Returns the raw Linear Link Position value
1124  */
1125 u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev,
1126 			   struct snd_soc_component *component,
1127 			   struct snd_pcm_substream *substream)
1128 {
1129 	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
1130 	struct snd_soc_pcm_runtime *be_rtd = NULL;
1131 	struct hdac_ext_stream *hext_stream;
1132 	struct snd_soc_dai *cpu_dai;
1133 	struct snd_soc_dpcm *dpcm;
1134 	u32 llp_l, llp_u;
1135 
1136 	/*
1137 	 * The LLP needs to be read from the Link DMA used for this FE as it is
1138 	 * allowed to use any combination of Link and Host channels
1139 	 */
1140 	for_each_dpcm_be(rtd, substream->stream, dpcm) {
1141 		if (dpcm->fe != rtd)
1142 			continue;
1143 
1144 		be_rtd = dpcm->be;
1145 	}
1146 
1147 	if (!be_rtd)
1148 		return 0;
1149 
1150 	cpu_dai = snd_soc_rtd_to_cpu(be_rtd, 0);
1151 	if (!cpu_dai)
1152 		return 0;
1153 
1154 	hext_stream = snd_soc_dai_get_dma_data(cpu_dai, substream);
1155 	if (!hext_stream)
1156 		return 0;
1157 
1158 	/*
1159 	 * The pplc_addr have been calculated during probe in
1160 	 * hda_dsp_stream_init():
1161 	 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1162 	 *	       SOF_HDA_PPLC_BASE +
1163 	 *	       SOF_HDA_PPLC_MULTI * total_stream +
1164 	 *	       SOF_HDA_PPLC_INTERVAL * stream_index
1165 	 *
1166 	 * Use this pre-calculated address to avoid repeated re-calculation.
1167 	 */
1168 	llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL);
1169 	llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU);
1170 
1171 	/* Compensate the LLP counter with the saved offset */
1172 	if (hext_stream->pplcllpl || hext_stream->pplcllpu)
1173 		return merge_u64(llp_u, llp_l) -
1174 		       merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl);
1175 
1176 	return merge_u64(llp_u, llp_l);
1177 }
1178 EXPORT_SYMBOL_NS(hda_dsp_get_stream_llp, "SND_SOC_SOF_INTEL_HDA_COMMON");
1179 
1180 /**
1181  * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream
1182  * @sdev: SOF device
1183  * @component: ASoC component
1184  * @substream: PCM substream
1185  *
1186  * Returns the raw Linear Link Position value
1187  */
1188 u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev,
1189 			   struct snd_soc_component *component,
1190 			   struct snd_pcm_substream *substream)
1191 {
1192 	struct hdac_stream *hstream = substream->runtime->private_data;
1193 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
1194 	u32 ldp_l, ldp_u;
1195 
1196 	/*
1197 	 * The pphc_addr have been calculated during probe in
1198 	 * hda_dsp_stream_init():
1199 	 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1200 	 *	       SOF_HDA_PPHC_BASE +
1201 	 *	       SOF_HDA_PPHC_INTERVAL * stream_index
1202 	 *
1203 	 * Use this pre-calculated address to avoid repeated re-calculation.
1204 	 */
1205 	ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL);
1206 	ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU);
1207 
1208 	return ((u64)ldp_u << 32) | ldp_l;
1209 }
1210 EXPORT_SYMBOL_NS(hda_dsp_get_stream_ldp, "SND_SOC_SOF_INTEL_HDA_COMMON");
1211