xref: /linux/sound/soc/sof/intel/hda-stream.c (revision 7255fcc80d4b525cc10cfaaf7f485830d4ed2000)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license.  When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2018 Intel Corporation. All rights reserved.
7 //
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 //	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 //	    Rander Wang <rander.wang@intel.com>
11 //	    Keyon Jie <yang.jie@linux.intel.com>
12 //
13 
14 /*
15  * Hardware interface for generic Intel audio DSP HDA IP
16  */
17 
18 #include <sound/hdaudio_ext.h>
19 #include <sound/hda_register.h>
20 #include <sound/sof.h>
21 #include <trace/events/sof_intel.h>
22 #include "../ops.h"
23 #include "../sof-audio.h"
24 #include "../ipc4-priv.h"
25 #include "hda.h"
26 
27 #define HDA_LTRP_GB_VALUE_US	95
28 
29 static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
30 {
31 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
32 		return "Playback";
33 	else
34 		return "Capture";
35 }
36 
37 static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
38 {
39 	struct snd_soc_pcm_runtime *rtd;
40 
41 	if (hstream->substream)
42 		rtd = snd_soc_substream_to_rtd(hstream->substream);
43 	else if (hstream->cstream)
44 		rtd = hstream->cstream->private_data;
45 	else
46 		/* Non-audio DMA user, like dma-trace */
47 		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
48 				 hda_hstream_direction_str(hstream),
49 				 hstream->stream_tag);
50 
51 	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
52 			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
53 			 hstream->stream_tag);
54 }
55 
56 /*
57  * set up one or more BDL entries for a stream
58  */
59 static int hda_setup_bdle(struct snd_sof_dev *sdev,
60 			  struct snd_dma_buffer *dmab,
61 			  struct hdac_stream *hstream,
62 			  struct sof_intel_dsp_bdl **bdlp,
63 			  int offset, int size, int ioc)
64 {
65 	struct hdac_bus *bus = sof_to_bus(sdev);
66 	struct sof_intel_dsp_bdl *bdl = *bdlp;
67 
68 	while (size > 0) {
69 		dma_addr_t addr;
70 		int chunk;
71 
72 		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
73 			dev_err(sdev->dev, "error: stream frags exceeded\n");
74 			return -EINVAL;
75 		}
76 
77 		addr = snd_sgbuf_get_addr(dmab, offset);
78 		/* program BDL addr */
79 		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
80 		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
81 		/* program BDL size */
82 		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
83 		/* a BDLE must not cross a 4K boundary */
84 		if (bus->align_bdle_4k) {
85 			u32 remain = 0x1000 - (offset & 0xfff);
86 
87 			if (chunk > remain)
88 				chunk = remain;
89 		}
90 		bdl->size = cpu_to_le32(chunk);
91 		/* only program IOC when the whole segment is processed */
92 		size -= chunk;
93 		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
94 		bdl++;
95 		hstream->frags++;
96 		offset += chunk;
97 	}
98 
99 	*bdlp = bdl;
100 	return offset;
101 }
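
/*
 * Worked example (illustrative only, not called by the driver): with
 * bus->align_bdle_4k set, a chunk that would cross a 4 KiB boundary is
 * split into two BDL entries. For offset = 0xf00 and size = 0x400,
 * assuming snd_sgbuf_get_chunk_size() reports at least 0x400 contiguous
 * bytes at that offset:
 *
 *	remain = 0x1000 - (0xf00 & 0xfff) = 0x100 -> BDLE[0].size = 0x100
 *	size  -= 0x100; offset = 0x1000           -> BDLE[1].size = 0x300
 *
 * hstream->frags ends up at 2 and the returned offset is 0x1300.
 */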
102 
103 /*
104  * set up Buffer Descriptor List (BDL) for host memory transfer
105  * BDL describes the location of the individual buffers and is little endian.
106  */
107 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
108 			     struct snd_dma_buffer *dmab,
109 			     struct hdac_stream *hstream)
110 {
111 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
112 	struct sof_intel_dsp_bdl *bdl;
113 	int i, offset, period_bytes, periods;
114 	int remain, ioc;
115 
116 	period_bytes = hstream->period_bytes;
117 	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
118 	if (!period_bytes)
119 		period_bytes = hstream->bufsize;
120 
121 	periods = hstream->bufsize / period_bytes;
122 
123 	dev_dbg(sdev->dev, "periods:%d\n", periods);
124 
125 	remain = hstream->bufsize % period_bytes;
126 	if (remain)
127 		periods++;
128 
129 	/* program the initial BDL entries */
130 	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
131 	offset = 0;
132 	hstream->frags = 0;
133 
134 	/*
135 	 * Set IOC if the position is not reported via IPC
136 	 * and period wakeups are needed.
137 	 */
138 	ioc = hda->no_ipc_position ?
139 	      !hstream->no_period_wakeup : 0;
140 
141 	for (i = 0; i < periods; i++) {
142 		if (i == (periods - 1) && remain)
143 			/* set the last small entry */
144 			offset = hda_setup_bdle(sdev, dmab,
145 						hstream, &bdl, offset,
146 						remain, 0);
147 		else
148 			offset = hda_setup_bdle(sdev, dmab,
149 						hstream, &bdl, offset,
150 						period_bytes, ioc);
151 	}
152 
153 	return offset;
154 }
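
/*
 * Worked example (illustrative): for bufsize = 0x10000 and
 * period_bytes = 0x6000, the loop above programs three regions:
 *
 *	periods = 0x10000 / 0x6000 = 2, remain = 0x4000 -> periods = 3
 *	region 0: 0x6000 bytes, region 1: 0x6000 bytes, region 2: 0x4000 bytes
 *
 * With hda->no_ipc_position set and period wakeups enabled, IOC is set on
 * the last BDLE of regions 0 and 1 only; the trailing remainder is always
 * programmed with ioc = 0, matching the call above.
 */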
155 
156 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
157 			       struct hdac_ext_stream *hext_stream,
158 			       int enable, u32 size)
159 {
160 	struct hdac_stream *hstream = &hext_stream->hstream;
161 	u32 mask;
162 
163 	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
164 		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
165 		return -EINVAL;
166 	}
167 
168 	mask = (1 << hstream->index);
169 
170 	/* enable/disable SPIB for the stream */
171 	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
172 				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
173 				enable << hstream->index);
174 
175 	/* set the SPIB value */
176 	sof_io_write(sdev, hstream->spib_addr, size);
177 
178 	return 0;
179 }
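
/*
 * Example (illustrative sketch): callers typically program SPIB with the
 * number of valid application bytes and disable it again on hw_free,
 * assuming HDA_DSP_SPIB_ENABLE is defined alongside HDA_DSP_SPIB_DISABLE
 * in hda.h and appl_bytes stands for the caller's byte count:
 *
 *	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_ENABLE,
 *				   appl_bytes);
 *	...
 *	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);
 */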
180 
181 /* get next unused stream */
182 struct hdac_ext_stream *
183 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
184 {
185 	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
186 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
187 	struct hdac_bus *bus = sof_to_bus(sdev);
188 	struct sof_intel_hda_stream *hda_stream;
189 	struct hdac_ext_stream *hext_stream = NULL;
190 	struct hdac_stream *s;
191 
192 	spin_lock_irq(&bus->reg_lock);
193 
194 	/* get an unused stream */
195 	list_for_each_entry(s, &bus->stream_list, list) {
196 		if (s->direction == direction && !s->opened) {
197 			hext_stream = stream_to_hdac_ext_stream(s);
198 			hda_stream = container_of(hext_stream,
199 						  struct sof_intel_hda_stream,
200 						  hext_stream);
201 			/* check if the host DMA channel is reserved */
202 			if (hda_stream->host_reserved)
203 				continue;
204 
205 			s->opened = true;
206 			break;
207 		}
208 	}
209 
210 	spin_unlock_irq(&bus->reg_lock);
211 
212 	/* stream found? */
213 	if (!hext_stream) {
214 		dev_err(sdev->dev, "error: no free %s streams\n",
215 			direction == SNDRV_PCM_STREAM_PLAYBACK ?
216 			"playback" : "capture");
217 		return hext_stream;
218 	}
219 
220 	hda_stream->flags = flags;
221 
222 	/*
223 	 * Prevent DMI Link L1 entry for streams that don't support it.
224 	 * Workaround to address a known issue with host DMA that results
225 	 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
226 	 */
227 	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
228 	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
229 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
230 					HDA_VS_INTEL_EM2,
231 					HDA_VS_INTEL_EM2_L1SEN, 0);
232 		hda->l1_disabled = true;
233 	}
234 
235 	return hext_stream;
236 }
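
/*
 * Example (illustrative): claiming a host DMA channel for playback. Streams
 * known not to be affected by DMI L1 entry pass
 * SOF_HDA_STREAM_DMI_L1_COMPATIBLE so that L1 is left enabled:
 *
 *	struct hdac_ext_stream *hext_stream;
 *
 *	hext_stream = hda_dsp_stream_get(sdev, SNDRV_PCM_STREAM_PLAYBACK,
 *					 SOF_HDA_STREAM_DMI_L1_COMPATIBLE);
 *	if (!hext_stream)
 *		return -ENODEV;
 */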
237 
238 /* free a stream */
239 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
240 {
241 	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
242 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
243 	struct hdac_bus *bus = sof_to_bus(sdev);
244 	struct sof_intel_hda_stream *hda_stream;
245 	struct hdac_ext_stream *hext_stream;
246 	struct hdac_stream *s;
247 	bool dmi_l1_enable = true;
248 	bool found = false;
249 
250 	spin_lock_irq(&bus->reg_lock);
251 
252 	/*
253 	 * close stream matching the stream tag and check if there are any open streams
254 	 * that are DMI L1 incompatible.
255 	 */
256 	list_for_each_entry(s, &bus->stream_list, list) {
257 		hext_stream = stream_to_hdac_ext_stream(s);
258 		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);
259 
260 		if (!s->opened)
261 			continue;
262 
263 		if (s->direction == direction && s->stream_tag == stream_tag) {
264 			s->opened = false;
265 			found = true;
266 		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
267 			dmi_l1_enable = false;
268 		}
269 	}
270 
271 	spin_unlock_irq(&bus->reg_lock);
272 
273 	/* Enable DMI L1 if permitted */
274 	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
275 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
276 					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
277 		hda->l1_disabled = false;
278 	}
279 
280 	if (!found) {
281 		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
282 			__func__, stream_tag);
283 		return -ENODEV;
284 	}
285 
286 	return 0;
287 }
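
/*
 * Example (illustrative): a stream claimed with hda_dsp_stream_get() is
 * released by direction and stream tag once the PCM is closed:
 *
 *	ret = hda_dsp_stream_put(sdev, SNDRV_PCM_STREAM_PLAYBACK,
 *				 hext_stream->hstream.stream_tag);
 *	if (ret < 0)
 *		dev_dbg(sdev->dev, "stream was not opened\n");
 */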
288 
289 static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
290 {
291 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
292 	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
293 	u32 val;
294 
295 	/* enter stream reset */
296 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
297 				SOF_STREAM_SD_OFFSET_CRST);
298 	do {
299 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
300 		if (val & SOF_STREAM_SD_OFFSET_CRST)
301 			break;
302 	} while (--timeout);
303 	if (timeout == 0) {
304 		dev_err(sdev->dev, "timeout waiting for stream reset\n");
305 		return -ETIMEDOUT;
306 	}
307 
308 	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
309 
310 	/* exit stream reset and wait to read a zero before reading any other register */
311 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);
312 
313 	/* wait for hardware to report that stream is out of reset */
314 	udelay(3);
315 	do {
316 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
317 		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
318 			break;
319 	} while (--timeout);
320 	if (timeout == 0) {
321 		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
322 		return -ETIMEDOUT;
323 	}
324 
325 	return 0;
326 }
327 
328 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
329 			   struct hdac_ext_stream *hext_stream, int cmd)
330 {
331 	struct hdac_stream *hstream = &hext_stream->hstream;
332 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
333 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
334 	int ret = 0;
335 	u32 run;
336 
337 	/* cmd must be for audio stream */
338 	switch (cmd) {
339 	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
340 		if (!sdev->dspless_mode_selected)
341 			break;
342 		fallthrough;
343 	case SNDRV_PCM_TRIGGER_START:
344 		if (hstream->running)
345 			break;
346 
347 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
348 					1 << hstream->index,
349 					1 << hstream->index);
350 
351 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
352 					sd_offset,
353 					SOF_HDA_SD_CTL_DMA_START |
354 					SOF_HDA_CL_DMA_SD_INT_MASK,
355 					SOF_HDA_SD_CTL_DMA_START |
356 					SOF_HDA_CL_DMA_SD_INT_MASK);
357 
358 		ret = snd_sof_dsp_read_poll_timeout(sdev,
359 					HDA_DSP_HDA_BAR,
360 					sd_offset, run,
361 					((run &	dma_start) == dma_start),
362 					HDA_DSP_REG_POLL_INTERVAL_US,
363 					HDA_DSP_STREAM_RUN_TIMEOUT);
364 
365 		if (ret >= 0)
366 			hstream->running = true;
367 
368 		break;
369 	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
370 		if (!sdev->dspless_mode_selected)
371 			break;
372 		fallthrough;
373 	case SNDRV_PCM_TRIGGER_SUSPEND:
374 	case SNDRV_PCM_TRIGGER_STOP:
375 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
376 					sd_offset,
377 					SOF_HDA_SD_CTL_DMA_START |
378 					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
379 
380 		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
381 						sd_offset, run,
382 						!(run &	dma_start),
383 						HDA_DSP_REG_POLL_INTERVAL_US,
384 						HDA_DSP_STREAM_RUN_TIMEOUT);
385 
386 		if (ret >= 0) {
387 			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
388 					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
389 					  SOF_HDA_CL_DMA_SD_INT_MASK);
390 
391 			hstream->running = false;
392 			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
393 						SOF_HDA_INTCTL,
394 						1 << hstream->index, 0x0);
395 		}
396 		break;
397 	default:
398 		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
399 		return -EINVAL;
400 	}
401 
402 	if (ret < 0) {
403 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
404 
405 		dev_err(sdev->dev,
406 			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
407 			__func__, cmd, stream_name ? stream_name : "unknown stream");
408 		kfree(stream_name);
409 	}
410 
411 	return ret;
412 }
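
/*
 * Example (illustrative): starting and stopping the host DMA around a
 * transfer; both calls poll SOF_HDA_SD_CTL_DMA_START until the controller
 * confirms the state change:
 *
 *	ret = hda_dsp_stream_trigger(sdev, hext_stream, SNDRV_PCM_TRIGGER_START);
 *	if (ret < 0)
 *		return ret;
 *	...
 *	ret = hda_dsp_stream_trigger(sdev, hext_stream, SNDRV_PCM_TRIGGER_STOP);
 */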
413 
414 /* minimal recommended programming for ICCMAX stream */
415 int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
416 				    struct snd_dma_buffer *dmab,
417 				    struct snd_pcm_hw_params *params)
418 {
419 	struct hdac_stream *hstream = &hext_stream->hstream;
420 	int sd_offset, ret;
421 	u32 mask;
422
423 	if (!hext_stream) {
424 		dev_err(sdev->dev, "error: no stream available\n");
425 		return -ENODEV;
426 	}
427 	if (!dmab) {
428 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
429 		return -ENODEV;
430 	}
431 	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
432 	mask = 0x1 << hstream->index;
433 
434 	if (hstream->posbuf)
435 		*hstream->posbuf = 0;
436 
437 	/* reset BDL address */
438 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
439 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
440 			  0x0);
441 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
442 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
443 			  0x0);
444 
445 	hstream->frags = 0;
446 
447 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
448 	if (ret < 0) {
449 		dev_err(sdev->dev, "error: set up of BDL failed\n");
450 		return ret;
451 	}
452 
453 	/* program BDL address */
454 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
455 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
456 			  (u32)hstream->bdl.addr);
457 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
458 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
459 			  upper_32_bits(hstream->bdl.addr));
460 
461 	/* program cyclic buffer length */
462 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
463 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
464 			  hstream->bufsize);
465 
466 	/* program last valid index */
467 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
468 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
469 				0xffff, (hstream->frags - 1));
470 
471 	/* decouple host and link DMA, enable DSP features */
472 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
473 				mask, mask);
474 
475 	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
476 	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
477 			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);
478 
479 	/* start DMA */
480 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
481 				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);
482 
483 	return 0;
484 }
485 
486 /*
487  * prepare the common hdac register settings, for both the code loader
488  * and normal streams.
489  */
490 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
491 			     struct hdac_ext_stream *hext_stream,
492 			     struct snd_dma_buffer *dmab,
493 			     struct snd_pcm_hw_params *params)
494 {
495 	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
496 	struct hdac_bus *bus = sof_to_bus(sdev);
497 	struct hdac_stream *hstream;
498 	int sd_offset, ret;
499 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
500 	u32 mask;
501 	u32 run;
502 
503 	if (!hext_stream) {
504 		dev_err(sdev->dev, "error: no stream available\n");
505 		return -ENODEV;
506 	}
507 
508 	if (!dmab) {
509 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
510 		return -ENODEV;
511 	}
512 
513 	hstream = &hext_stream->hstream;
514 	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
515 	mask = BIT(hstream->index);
516 
517 	/* decouple host and link DMA if the DSP is used */
518 	if (!sdev->dspless_mode_selected)
519 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
520 					mask, mask);
521 
522 	/* clear stream status */
523 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
524 				SOF_HDA_CL_DMA_SD_INT_MASK |
525 				SOF_HDA_SD_CTL_DMA_START, 0);
526 
527 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
528 					    sd_offset, run,
529 					    !(run & dma_start),
530 					    HDA_DSP_REG_POLL_INTERVAL_US,
531 					    HDA_DSP_STREAM_RUN_TIMEOUT);
532 
533 	if (ret < 0) {
534 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
535 
536 		dev_err(sdev->dev,
537 			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
538 			__func__, stream_name ? stream_name : "unknown stream");
539 		kfree(stream_name);
540 		return ret;
541 	}
542 
543 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
544 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
545 				SOF_HDA_CL_DMA_SD_INT_MASK,
546 				SOF_HDA_CL_DMA_SD_INT_MASK);
547 
548 	/* stream reset */
549 	ret = hda_dsp_stream_reset(sdev, hstream);
550 	if (ret < 0)
551 		return ret;
552 
553 	if (hstream->posbuf)
554 		*hstream->posbuf = 0;
555 
556 	/* reset BDL address */
557 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
558 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
559 			  0x0);
560 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
561 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
562 			  0x0);
563 
564 	/* clear stream status */
565 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
566 				SOF_HDA_CL_DMA_SD_INT_MASK |
567 				SOF_HDA_SD_CTL_DMA_START, 0);
568 
569 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
570 					    sd_offset, run,
571 					    !(run & dma_start),
572 					    HDA_DSP_REG_POLL_INTERVAL_US,
573 					    HDA_DSP_STREAM_RUN_TIMEOUT);
574 
575 	if (ret < 0) {
576 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
577 
578 		dev_err(sdev->dev,
579 			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
580 			__func__, stream_name ? stream_name : "unknown stream");
581 		kfree(stream_name);
582 		return ret;
583 	}
584 
585 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
586 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
587 				SOF_HDA_CL_DMA_SD_INT_MASK,
588 				SOF_HDA_CL_DMA_SD_INT_MASK);
589 
590 	hstream->frags = 0;
591 
592 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
593 	if (ret < 0) {
594 		dev_err(sdev->dev, "error: set up of BDL failed\n");
595 		return ret;
596 	}
597 
598 	/* program stream tag to set up stream descriptor for DMA */
599 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
600 				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
601 				hstream->stream_tag <<
602 				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
603 
604 	/* program cyclic buffer length */
605 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
606 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
607 			  hstream->bufsize);
608 
609 	/*
610 	 * Recommended hardware programming sequence for HDAudio DMA format
611 	 * on earlier platforms - this is not needed on newer platforms
612 	 *
613 	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
614 	 *    for corresponding stream index before the time of writing
615 	 *    format to SDxFMT register.
616 	 * 2. Write SDxFMT
617 	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
618 	 *    enable decoupled mode
619 	 */
620 
621 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
622 		/* couple host and link DMA, disable DSP features */
623 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
624 					mask, 0);
625 
626 	/* program stream format */
627 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
628 				sd_offset +
629 				SOF_HDA_ADSP_REG_SD_FORMAT,
630 				0xffff, hstream->format_val);
631 
632 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
633 		/* decouple host and link DMA, enable DSP features */
634 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
635 					mask, mask);
636 
637 	/* program last valid index */
638 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
639 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
640 				0xffff, (hstream->frags - 1));
641 
642 	/* program BDL address */
643 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
644 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
645 			  (u32)hstream->bdl.addr);
646 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
647 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
648 			  upper_32_bits(hstream->bdl.addr));
649 
650 	/* enable position buffer, if needed */
651 	if (bus->use_posbuf && bus->posbuf.addr &&
652 	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
653 	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
654 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
655 				  upper_32_bits(bus->posbuf.addr));
656 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
657 				  (u32)bus->posbuf.addr |
658 				  SOF_HDA_ADSP_DPLBASE_ENABLE);
659 	}
660 
661 	/* set interrupt enable bits */
662 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
663 				SOF_HDA_CL_DMA_SD_INT_MASK,
664 				SOF_HDA_CL_DMA_SD_INT_MASK);
665 
666 	/* read FIFO size */
667 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
668 		hstream->fifo_size =
669 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
670 					 sd_offset +
671 					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
672 		hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
673 		hstream->fifo_size += 1;
674 	} else {
675 		hstream->fifo_size = 0;
676 	}
677 
678 	return ret;
679 }
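
/*
 * Example (illustrative): a typical ordering for a host stream is
 * get -> hw_params -> trigger -> hw_free -> put, where dmab, params,
 * substream, direction and flags stand for the caller's buffer, hw_params,
 * substream and open arguments:
 *
 *	hext_stream = hda_dsp_stream_get(sdev, direction, flags);
 *	ret = hda_dsp_stream_hw_params(sdev, hext_stream, dmab, params);
 *	if (ret < 0)
 *		return ret;
 *	ret = hda_dsp_stream_trigger(sdev, hext_stream, SNDRV_PCM_TRIGGER_START);
 *	...
 *	hda_dsp_stream_hw_free(sdev, substream);
 *	hda_dsp_stream_put(sdev, direction, hext_stream->hstream.stream_tag);
 */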
680 
681 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
682 			   struct snd_pcm_substream *substream)
683 {
684 	struct hdac_stream *hstream = substream->runtime->private_data;
685 	struct hdac_ext_stream *hext_stream = container_of(hstream,
686 							 struct hdac_ext_stream,
687 							 hstream);
688 	int ret;
689 
690 	ret = hda_dsp_stream_reset(sdev, hstream);
691 	if (ret < 0)
692 		return ret;
693 
694 	if (!sdev->dspless_mode_selected) {
695 		struct hdac_bus *bus = sof_to_bus(sdev);
696 		u32 mask = BIT(hstream->index);
697 
698 		spin_lock_irq(&bus->reg_lock);
699 		/* couple host and link DMA if link DMA channel is idle */
700 		if (!hext_stream->link_locked)
701 			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
702 						SOF_HDA_REG_PP_PPCTL, mask, 0);
703 		spin_unlock_irq(&bus->reg_lock);
704 	}
705 
706 	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);
707 
708 	hstream->substream = NULL;
709 
710 	return 0;
711 }
712 
713 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
714 {
715 	struct hdac_bus *bus = sof_to_bus(sdev);
716 	bool ret = false;
717 	u32 status;
718 
719 	/* The function can be called from an IRQ thread, so use spin_lock_irq */
720 	spin_lock_irq(&bus->reg_lock);
721 
722 	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
723 
724 	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);
725 
726 	/* if the register is inaccessible, ignore it */
727 	if (status != 0xffffffff)
728 		ret = true;
729 
730 	spin_unlock_irq(&bus->reg_lock);
731 
732 	return ret;
733 }
734 
735 static void
736 hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
737 {
738 	u64 buffer_size = hstream->bufsize;
739 	u64 prev_pos, pos, num_bytes;
740 
741 	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
742 	pos = hda_dsp_stream_get_position(hstream, direction, false);
743 
744 	if (pos < prev_pos)
745 		num_bytes = (buffer_size - prev_pos) + pos;
746 	else
747 		num_bytes = pos - prev_pos;
748 
749 	hstream->curr_pos += num_bytes;
750 }
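
/*
 * Worked example (illustrative): with bufsize = 0x8000, a previous
 * curr_pos of 0x7800 (so prev_pos = 0x7800) and a new hardware position of
 * 0x0800, the read pointer has wrapped, so
 *
 *	num_bytes = (0x8000 - 0x7800) + 0x0800 = 0x1000
 *
 * and curr_pos advances monotonically by 0x1000 bytes.
 */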
751 
752 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
753 {
754 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
755 	struct hdac_stream *s;
756 	bool active = false;
757 	u32 sd_status;
758 
759 	list_for_each_entry(s, &bus->stream_list, list) {
760 		if (status & BIT(s->index) && s->opened) {
761 			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);
762 
763 			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);
764 
765 			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);
766 
767 			active = true;
768 			if ((!s->substream && !s->cstream) ||
769 			    !s->running ||
770 			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
771 				continue;
772 
773 			/* Inform ALSA only if the position is not reported via IPC */
774 			if (s->substream && sof_hda->no_ipc_position) {
775 				snd_sof_pcm_period_elapsed(s->substream);
776 			} else if (s->cstream) {
777 				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
778 				snd_compr_fragment_elapsed(s->cstream);
779 			}
780 		}
781 	}
782 
783 	return active;
784 }
785 
786 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
787 {
788 	struct snd_sof_dev *sdev = context;
789 	struct hdac_bus *bus = sof_to_bus(sdev);
790 	bool active;
791 	u32 status;
792 	int i;
793 
794 	/*
795 	 * Loop 10 times to handle missed interrupts caused by
796 	 * unsolicited responses from the codec
797 	 */
798 	for (i = 0, active = true; i < 10 && active; i++) {
799 		spin_lock_irq(&bus->reg_lock);
800 
801 		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
802 
803 		/* check streams */
804 		active = hda_dsp_stream_check(bus, status);
805 
806 		/* check and clear RIRB interrupt */
807 		if (status & AZX_INT_CTRL_EN) {
808 			active |= hda_codec_check_rirb_status(sdev);
809 		}
810 		spin_unlock_irq(&bus->reg_lock);
811 	}
812 
813 	return IRQ_HANDLED;
814 }
815 
816 int hda_dsp_stream_init(struct snd_sof_dev *sdev)
817 {
818 	struct hdac_bus *bus = sof_to_bus(sdev);
819 	struct hdac_ext_stream *hext_stream;
820 	struct hdac_stream *hstream;
821 	struct pci_dev *pci = to_pci_dev(sdev->dev);
822 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
823 	int sd_offset;
824 	int i, num_playback, num_capture, num_total, ret;
825 	u32 gcap;
826 
827 	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
828 	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
829 
830 	/* get stream count from GCAP */
831 	num_capture = (gcap >> 8) & 0x0f;
832 	num_playback = (gcap >> 12) & 0x0f;
833 	num_total = num_playback + num_capture;
834 
835 	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
836 		num_playback, num_capture);
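
	/*
	 * Example decode (illustrative): GCAP bits 11:8 give the input
	 * (capture) stream count and bits 15:12 the output (playback) count,
	 * so gcap = 0x9701 yields 7 capture and 9 playback streams, i.e. 16
	 * host DMA channels in total.
	 */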
837 
838 	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
839 		dev_err(sdev->dev, "error: too many playback streams %d\n",
840 			num_playback);
841 		return -EINVAL;
842 	}
843 
844 	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
845 		dev_err(sdev->dev, "error: too many capture streams %d\n",
846 			num_capture);
847 		return -EINVAL;
848 	}
849 
850 	/*
851 	 * mem alloc for the position buffer
852 	 * TODO: check position buffer update
853 	 */
854 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
855 				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
856 				  &bus->posbuf);
857 	if (ret < 0) {
858 		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
859 		return -ENOMEM;
860 	}
861 
862 	/*
863 	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
864 	 * HDAudio codecs
865 	 */
866 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
867 				  PAGE_SIZE, &bus->rb);
868 	if (ret < 0) {
869 		dev_err(sdev->dev, "error: RB alloc failed\n");
870 		return -ENOMEM;
871 	}
872 
873 	/* create capture and playback streams */
874 	for (i = 0; i < num_total; i++) {
875 		struct sof_intel_hda_stream *hda_stream;
876 
877 		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
878 					  GFP_KERNEL);
879 		if (!hda_stream)
880 			return -ENOMEM;
881 
882 		hda_stream->sdev = sdev;
883 
884 		hext_stream = &hda_stream->hext_stream;
885 
886 		if (sdev->bar[HDA_DSP_PP_BAR]) {
887 			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
888 				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
889 
890 			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
891 				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
892 				SOF_HDA_PPLC_INTERVAL * i;
893 		}
894 
895 		hstream = &hext_stream->hstream;
896 
897 		/* do we support SPIB */
898 		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
899 			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
900 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
901 				SOF_HDA_SPIB_SPIB;
902 
903 			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
904 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
905 				SOF_HDA_SPIB_MAXFIFO;
906 		}
907 
908 		hstream->bus = bus;
909 		hstream->sd_int_sta_mask = 1 << i;
910 		hstream->index = i;
911 		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
912 		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
913 		hstream->opened = false;
914 		hstream->running = false;
915 
916 		if (i < num_capture) {
917 			hstream->stream_tag = i + 1;
918 			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
919 		} else {
920 			hstream->stream_tag = i - num_capture + 1;
921 			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
922 		}
923 
924 		/* mem alloc for stream BDL */
925 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
926 					  HDA_DSP_BDL_SIZE, &hstream->bdl);
927 		if (ret < 0) {
928 			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
929 			return -ENOMEM;
930 		}
931 
932 		hstream->posbuf = (__le32 *)(bus->posbuf.area +
933 			(hstream->index) * 8);
934 
935 		list_add_tail(&hstream->list, &bus->stream_list);
936 	}
937 
938 	/* store total stream count (playback + capture) from GCAP */
939 	sof_hda->stream_max = num_total;
940 
941 	/* store stream count from GCAP required for CHAIN_DMA */
942 	if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) {
943 		struct sof_ipc4_fw_data *ipc4_data = sdev->private;
944 
945 		ipc4_data->num_playback_streams = num_playback;
946 		ipc4_data->num_capture_streams = num_capture;
947 	}
948 
949 	return 0;
950 }
951 
952 void hda_dsp_stream_free(struct snd_sof_dev *sdev)
953 {
954 	struct hdac_bus *bus = sof_to_bus(sdev);
955 	struct hdac_stream *s, *_s;
956 	struct hdac_ext_stream *hext_stream;
957 	struct sof_intel_hda_stream *hda_stream;
958 
959 	/* free position buffer */
960 	if (bus->posbuf.area)
961 		snd_dma_free_pages(&bus->posbuf);
962 
963 	/* free CORB/RIRB buffer - only used for HDaudio codecs */
964 	if (bus->rb.area)
965 		snd_dma_free_pages(&bus->rb);
966 
967 	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
968 		/* TODO: decouple */
969 
970 		/* free bdl buffer */
971 		if (s->bdl.area)
972 			snd_dma_free_pages(&s->bdl);
973 		list_del(&s->list);
974 		hext_stream = stream_to_hdac_ext_stream(s);
975 		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
976 					  hext_stream);
977 		devm_kfree(sdev->dev, hda_stream);
978 	}
979 }
980 
981 snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
982 					      int direction, bool can_sleep)
983 {
984 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
985 	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
986 	struct snd_sof_dev *sdev = hda_stream->sdev;
987 	snd_pcm_uframes_t pos;
988 
989 	switch (sof_hda_position_quirk) {
990 	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
991 		/*
992 		 * This legacy code, inherited from the Skylake driver,
993 		 * mixes DPIB registers and DPIB DDR updates and
994 		 * does not seem to follow any known hardware recommendations.
995 		 * It's not clear e.g. why there is a different flow
996 		 * It's not clear e.g. why there is a different flow
997 		 * for capture and playback; the only information that matters is
998 		 * which traffic class is used, and since all SOF-enabled platforms
999 		 * support only VC0, the work-around was likely not necessary
1000 		 * and quite possibly wrong.
1001 
1002 		/* DPIB/posbuf position mode:
1003 		 * For playback, use the DPIB register from HDA space, which
1004 		 * reflects the actual data transferred.
1005 		 * For capture, use the position buffer for the pointer, as DPIB
1006 		 * is not accurate enough; its update may complete earlier than
1007 		 * the data is written to DDR.
1008 		 */
1009 		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
1010 			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
1011 					       AZX_REG_VS_SDXDPIB_XBASE +
1012 					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
1013 						hstream->index));
1014 		} else {
1015 			/*
1016 			 * For capture streams, additional workarounds are needed to
1017 			 * fix incorrect position reporting:
1018 			 *
1019 			 * 1. Wait at least 20us before reading the position buffer
1020 			 * after the interrupt (IOC) is generated, so that the position
1021 			 * update happens on a frame boundary, i.e. 20.833us at 48kHz.
1022 			 * 2. Perform a dummy read of the DPIB register to flush the
1023 			 * DMA position value.
1024 			 * 3. Read the DMA position from posbuf; the readback value
1025 			 * should now be >= the period boundary.
1026 			 */
1027 			if (can_sleep)
1028 				usleep_range(20, 21);
1029 
1030 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
1031 					 AZX_REG_VS_SDXDPIB_XBASE +
1032 					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
1033 					  hstream->index));
1034 			pos = snd_hdac_stream_get_pos_posbuf(hstream);
1035 		}
1036 		break;
1037 	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
1038 		/*
1039 		 * In case VC1 traffic is disabled this is the recommended option
1040 		 */
1041 		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
1042 				       AZX_REG_VS_SDXDPIB_XBASE +
1043 				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
1044 					hstream->index));
1045 		break;
1046 	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
1047 		/*
1048 		 * This is the recommended option when VC1 is enabled.
1049 		 * While this isn't needed for SOF platforms, it's added for
1050 		 * consistency and debug.
1051 		 */
1052 		pos = snd_hdac_stream_get_pos_posbuf(hstream);
1053 		break;
1054 	default:
1055 		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
1056 			     sof_hda_position_quirk);
1057 		pos = 0;
1058 		break;
1059 	}
1060 
1061 	if (pos >= hstream->bufsize)
1062 		pos = 0;
1063 
1064 	return pos;
1065 }
1066 
1067 #define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l))
1068 
1069 /**
1070  * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream
1071  * @sdev: SOF device
1072  * @component: ASoC component
1073  * @substream: PCM substream
1074  *
1075  * Returns the raw Linear Link Position value
1076  */
1077 u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev,
1078 			   struct snd_soc_component *component,
1079 			   struct snd_pcm_substream *substream)
1080 {
1081 	struct hdac_stream *hstream = substream->runtime->private_data;
1082 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
1083 	u32 llp_l, llp_u;
1084 
1085 	/*
1086 	 * The pplc_addr has been calculated during probe in
1087 	 * hda_dsp_stream_init():
1088 	 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1089 	 *	       SOF_HDA_PPLC_BASE +
1090 	 *	       SOF_HDA_PPLC_MULTI * total_stream +
1091 	 *	       SOF_HDA_PPLC_INTERVAL * stream_index
1092 	 *
1093 	 * Use this pre-calculated address to avoid repeated re-calculation.
1094 	 */
1095 	llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL);
1096 	llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU);
1097 
1098 	/* Compensate the LLP counter with the saved offset */
1099 	if (hext_stream->pplcllpl || hext_stream->pplcllpu)
1100 		return merge_u64(llp_u, llp_l) -
1101 		       merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl);
1102 
1103 	return merge_u64(llp_u, llp_l);
1104 }
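
/*
 * Example (illustrative): if the saved offsets are pplcllpu = 0x0 and
 * pplcllpl = 0x10000, and the registers read back llp_u = 0x0 and
 * llp_l = 0x14000, the reported LLP is
 *
 *	merge_u64(0x0, 0x14000) - merge_u64(0x0, 0x10000) = 0x4000
 *
 * i.e. the counter is rebased to the point where the offset was saved.
 */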
1105 
1106 /**
1107  * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream
1108  * @sdev: SOF device
1109  * @component: ASoC component
1110  * @substream: PCM substream
1111  *
1112  * Returns the raw Linear DMA Position value
1113  */
1114 u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev,
1115 			   struct snd_soc_component *component,
1116 			   struct snd_pcm_substream *substream)
1117 {
1118 	struct hdac_stream *hstream = substream->runtime->private_data;
1119 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
1120 	u32 ldp_l, ldp_u;
1121 
1122 	/*
1123 	 * The pphc_addr has been calculated during probe in
1124 	 * hda_dsp_stream_init():
1125 	 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1126 	 *	       SOF_HDA_PPHC_BASE +
1127 	 *	       SOF_HDA_PPHC_INTERVAL * stream_index
1128 	 *
1129 	 * Use this pre-calculated address to avoid repeated re-calculation.
1130 	 */
1131 	ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL);
1132 	ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU);
1133 
1134 	return ((u64)ldp_u << 32) | ldp_l;
1135 }
1136