xref: /linux/sound/soc/sof/intel/hda-stream.c (revision 8834ae896bfe10f239d49adb9cc76bb6a57c431c)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 //
3 // This file is provided under a dual BSD/GPLv2 license.  When using or
4 // redistributing this file, you may do so under either license.
5 //
6 // Copyright(c) 2018 Intel Corporation
7 //
8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
9 //	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
10 //	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
12 //
13 
14 /*
15  * Hardware interface for generic Intel audio DSP HDA IP
16  */
17 
18 #include <sound/hdaudio_ext.h>
19 #include <sound/hda_register.h>
20 #include <sound/sof.h>
21 #include <trace/events/sof_intel.h>
22 #include "../ops.h"
23 #include "../sof-audio.h"
24 #include "../ipc4-priv.h"
25 #include "hda.h"
26 
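/*
 * Selects how the stream DMA position is derived in
 * hda_dsp_stream_get_position(): the legacy Skylake DPIB/posbuf mix, the
 * DPIB registers (default) or the DPIB update written to DDR (posbuf).
 */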
27 int sof_hda_position_quirk = SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS;
28 module_param_named(position_quirk, sof_hda_position_quirk, int, 0444);
29 MODULE_PARM_DESC(position_quirk, "SOF HDaudio position quirk");
30 EXPORT_SYMBOL_NS(sof_hda_position_quirk, "SND_SOC_SOF_INTEL_HDA_COMMON");
31 
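/*
 * LTR guardband (in microseconds) recommended by hardware during firmware
 * boot, programmed into HDA_VS_INTEL_LTRP in hda_dsp_iccmax_stream_hw_params().
 */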
32 #define HDA_LTRP_GB_VALUE_US	95
33 
34 static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
35 {
36 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
37 		return "Playback";
38 	else
39 		return "Capture";
40 }
41 
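/*
 * Returns a kasprintf()-allocated description of the stream; the caller must
 * kfree() it and handle a NULL return on allocation failure.
 */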
42 static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
43 {
44 	struct snd_soc_pcm_runtime *rtd;
45 
46 	if (hstream->substream)
47 		rtd = snd_soc_substream_to_rtd(hstream->substream);
48 	else if (hstream->cstream)
49 		rtd = hstream->cstream->private_data;
50 	else
		/* Non-audio DMA user, like dma-trace */
52 		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
53 				 hda_hstream_direction_str(hstream),
54 				 hstream->stream_tag);
55 
56 	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
57 			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
58 			 hstream->stream_tag);
59 }
60 
61 /*
 * set up one of the BDL entries for a stream
63  */
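/*
 * Example with hypothetical numbers: for a 0x3000-byte scatter-gather chunk
 * whose start address is 0x800 short of the next 4K boundary and with
 * align_bdle_4k set, the first BDLE is truncated to 0x800 bytes so that it
 * does not cross the boundary, and the remaining 0x2800 bytes are described
 * by the following BDLE(s). IOC is set only on the entry that completes the
 * requested size, and only when the caller asked for it.
 */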
64 static int hda_setup_bdle(struct snd_sof_dev *sdev,
65 			  struct snd_dma_buffer *dmab,
66 			  struct hdac_stream *hstream,
67 			  struct sof_intel_dsp_bdl **bdlp,
68 			  int offset, int size, int ioc)
69 {
70 	struct hdac_bus *bus = sof_to_bus(sdev);
71 	struct sof_intel_dsp_bdl *bdl = *bdlp;
72 
73 	while (size > 0) {
74 		dma_addr_t addr;
75 		int chunk;
76 
77 		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
78 			dev_err(sdev->dev, "error: stream frags exceeded\n");
79 			return -EINVAL;
80 		}
81 
82 		addr = snd_sgbuf_get_addr(dmab, offset);
83 		/* program BDL addr */
84 		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
85 		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
86 		/* program BDL size */
87 		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* a single BDLE must not cross a 4K boundary */
89 		if (bus->align_bdle_4k) {
90 			u32 remain = 0x1000 - (offset & 0xfff);
91 
92 			if (chunk > remain)
93 				chunk = remain;
94 		}
95 		bdl->size = cpu_to_le32(chunk);
96 		/* only program IOC when the whole segment is processed */
97 		size -= chunk;
98 		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
99 		bdl++;
100 		hstream->frags++;
101 		offset += chunk;
102 	}
103 
104 	*bdlp = bdl;
105 	return offset;
106 }
107 
108 /*
109  * set up Buffer Descriptor List (BDL) for host memory transfer
110  * BDL describes the location of the individual buffers and is little endian.
111  */
112 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
113 			     struct snd_dma_buffer *dmab,
114 			     struct hdac_stream *hstream)
115 {
116 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
117 	struct sof_intel_dsp_bdl *bdl;
118 	int i, offset, period_bytes, periods;
119 	int remain, ioc;
120 
121 	period_bytes = hstream->period_bytes;
122 	dev_dbg(sdev->dev, "period_bytes: %#x, bufsize: %#x\n", period_bytes,
123 		hstream->bufsize);
124 
125 	if (!period_bytes) {
126 		unsigned int chunk_size;
127 
128 		chunk_size = snd_sgbuf_get_chunk_size(dmab, 0, hstream->bufsize);
129 
130 		period_bytes = hstream->bufsize;
131 
		/*
		 * The HDA spec demands that the LVI value is at least one
		 * before the DMA operation can begin, which means that at
		 * least two BDLEs must be present for the transfer.
		 *
		 * If the buffer is not a single continuous area,
		 * hda_setup_bdle() will create one BDLE per segment.
		 * If the memory is a single continuous area, force it to be
		 * split into two 'periods' so that at least two BDLEs get
		 * programmed by hda_setup_bdle().
		 *
		 * Note: period_bytes == 0 can only happen for firmware or
		 * library loading. The data size is 4K aligned, which ensures
		 * that the second chunk's start address will be 128-byte
		 * aligned.
		 */
148 		if (chunk_size == hstream->bufsize)
149 			period_bytes /= 2;
150 	}
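	/*
	 * Example with hypothetical sizes: a 512 KiB firmware image in one
	 * continuous allocation would otherwise produce a single BDLE;
	 * halving period_bytes to 256 KiB yields two BDLEs so that LVI can
	 * be programmed to a non-zero value as the spec requires.
	 */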
151 
152 	periods = hstream->bufsize / period_bytes;
153 
154 	dev_dbg(sdev->dev, "periods: %d\n", periods);
155 
156 	remain = hstream->bufsize % period_bytes;
157 	if (remain)
158 		periods++;
159 
160 	/* program the initial BDL entries */
161 	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
162 	offset = 0;
163 	hstream->frags = 0;
164 
	/*
	 * Set IOC if the position IPC is not used and period
	 * wakeups are needed.
	 */
169 	ioc = hda->no_ipc_position ?
170 	      !hstream->no_period_wakeup : 0;
171 
172 	for (i = 0; i < periods; i++) {
173 		if (i == (periods - 1) && remain)
174 			/* set the last small entry */
175 			offset = hda_setup_bdle(sdev, dmab,
176 						hstream, &bdl, offset,
177 						remain, 0);
178 		else
179 			offset = hda_setup_bdle(sdev, dmab,
180 						hstream, &bdl, offset,
181 						period_bytes, ioc);
182 	}
183 
184 	return offset;
185 }
186 
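/*
 * SPIB (Software Position In Buffer) tells the controller how much valid
 * data the software has made available for the stream. In this driver it is
 * armed with the full transfer size in hda_data_stream_prepare() and
 * disabled again in hda_dsp_stream_hw_free() and hda_data_stream_cleanup().
 */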
187 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
188 			       struct hdac_ext_stream *hext_stream,
189 			       int enable, u32 size)
190 {
191 	struct hdac_stream *hstream = &hext_stream->hstream;
192 	u32 mask;
193 
194 	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
195 		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
196 		return -EINVAL;
197 	}
198 
199 	mask = (1 << hstream->index);
200 
201 	/* enable/disable SPIB for the stream */
202 	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
203 				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
204 				enable << hstream->index);
205 
206 	/* set the SPIB value */
207 	sof_io_write(sdev, hstream->spib_addr, size);
208 
209 	return 0;
210 }
211 
212 /* get next unused stream */
213 static struct hdac_ext_stream *
214 _hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags, bool pair)
215 {
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
217 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
218 	struct hdac_bus *bus = sof_to_bus(sdev);
219 	struct sof_intel_hda_stream *hda_stream;
220 	struct hdac_ext_stream *hext_stream = NULL;
221 	struct hdac_stream *s;
222 
223 	spin_lock_irq(&bus->reg_lock);
224 
225 	/* get an unused stream */
226 	list_for_each_entry(s, &bus->stream_list, list) {
227 		if (s->direction == direction && !s->opened) {
228 			hext_stream = stream_to_hdac_ext_stream(s);
229 			hda_stream = container_of(hext_stream,
230 						  struct sof_intel_hda_stream,
231 						  hext_stream);
232 			/* check if the host DMA channel is reserved */
233 			if (hda_stream->host_reserved)
234 				continue;
235 
236 			if (pair && hext_stream->link_locked)
237 				continue;
238 
239 			s->opened = true;
240 
241 			if (pair)
242 				hext_stream->link_locked = true;
243 
244 			break;
245 		}
246 	}
247 
248 	spin_unlock_irq(&bus->reg_lock);
249 
	/* stream found? */
251 	if (!hext_stream) {
252 		dev_err(sdev->dev, "error: no free %s streams\n", snd_pcm_direction_name(direction));
253 		return hext_stream;
254 	}
255 
256 	hda_stream->flags = flags;
257 
258 	/*
259 	 * Prevent DMI Link L1 entry for streams that don't support it.
260 	 * Workaround to address a known issue with host DMA that results
261 	 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP.
262 	 */
263 	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
264 	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
265 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
266 					HDA_VS_INTEL_EM2,
267 					HDA_VS_INTEL_EM2_L1SEN, 0);
268 		hda->l1_disabled = true;
269 	}
270 
271 	return hext_stream;
272 }
273 
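/*
 * hda_dsp_stream_get() reserves only the host DMA channel;
 * hda_dsp_stream_pair_get() additionally marks the link DMA channel of the
 * same stream as locked (link_locked) so that the host/link pair stays
 * together until hda_dsp_stream_pair_put() releases it via
 * snd_hdac_ext_stream_release().
 */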
274 struct hdac_ext_stream *
275 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
276 {
277 	return _hda_dsp_stream_get(sdev, direction, flags, false);
278 }
279 
280 struct hdac_ext_stream *
281 hda_dsp_stream_pair_get(struct snd_sof_dev *sdev, int direction, u32 flags)
282 {
283 	return _hda_dsp_stream_get(sdev, direction, flags, true);
284 }
285 
286 /* free a stream */
287 static int _hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag, bool pair)
288 {
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
290 	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
291 	struct hdac_bus *bus = sof_to_bus(sdev);
292 	struct sof_intel_hda_stream *hda_stream;
293 	struct hdac_ext_stream *hext_stream;
294 	struct hdac_ext_stream *link_stream;
295 	struct hdac_stream *s;
296 	bool dmi_l1_enable = true;
297 	bool found = false;
298 
299 	spin_lock_irq(&bus->reg_lock);
300 
301 	/*
	 * close the stream matching the stream tag and check whether any open
	 * streams are DMI L1 incompatible.
304 	 */
305 	list_for_each_entry(s, &bus->stream_list, list) {
306 		hext_stream = stream_to_hdac_ext_stream(s);
307 		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);
308 
309 		if (!s->opened)
310 			continue;
311 
312 		if (s->direction == direction && s->stream_tag == stream_tag) {
313 			s->opened = false;
314 			found = true;
315 			if (pair)
316 				link_stream = hext_stream;
317 		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
318 			dmi_l1_enable = false;
319 		}
320 	}
321 
322 	spin_unlock_irq(&bus->reg_lock);
323 
324 	/* Enable DMI L1 if permitted */
325 	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
326 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
327 					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
328 		hda->l1_disabled = false;
329 	}
330 
331 	if (!found) {
332 		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
333 			__func__, stream_tag);
334 		return -ENODEV;
335 	}
336 
337 	if (pair)
338 		snd_hdac_ext_stream_release(link_stream, HDAC_EXT_STREAM_TYPE_LINK);
339 
340 	return 0;
341 }
342 
343 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
344 {
345 	return _hda_dsp_stream_put(sdev, direction, stream_tag, false);
346 }
347 
348 int hda_dsp_stream_pair_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
349 {
350 	return _hda_dsp_stream_put(sdev, direction, stream_tag, true);
351 }
352 
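/*
 * Stream reset handshake: set the stream reset bit (SOF_STREAM_SD_OFFSET_CRST)
 * and wait for the controller to reflect it, then clear the bit and wait for
 * it to read back as zero before touching any other stream register.
 */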
353 static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
354 {
355 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
356 	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
357 	u32 val;
358 
359 	/* enter stream reset */
360 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
361 				SOF_STREAM_SD_OFFSET_CRST);
362 	do {
363 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
364 		if (val & SOF_STREAM_SD_OFFSET_CRST)
365 			break;
366 	} while (--timeout);
367 	if (timeout == 0) {
368 		dev_err(sdev->dev, "timeout waiting for stream reset\n");
369 		return -ETIMEDOUT;
370 	}
371 
372 	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
373 
374 	/* exit stream reset and wait to read a zero before reading any other register */
375 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);
376 
377 	/* wait for hardware to report that stream is out of reset */
378 	udelay(3);
379 	do {
380 		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
381 		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
382 			break;
383 	} while (--timeout);
384 	if (timeout == 0) {
385 		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
386 		return -ETIMEDOUT;
387 	}
388 
389 	return 0;
390 }
391 
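/*
 * Note on PAUSE_PUSH/PAUSE_RELEASE: when the DSP is in use they are no-ops
 * for the host DMA here (the pause is presumably handled by the DSP
 * pipeline); in dspless mode they fall through to the STOP/START handling
 * below.
 */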
392 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
393 			   struct hdac_ext_stream *hext_stream, int cmd)
394 {
395 	struct hdac_stream *hstream = &hext_stream->hstream;
396 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
397 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
398 	int ret = 0;
399 	u32 run;
400 
401 	/* cmd must be for audio stream */
402 	switch (cmd) {
403 	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
404 		if (!sdev->dspless_mode_selected)
405 			break;
406 		fallthrough;
407 	case SNDRV_PCM_TRIGGER_START:
408 		if (hstream->running)
409 			break;
410 
411 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
412 					1 << hstream->index,
413 					1 << hstream->index);
414 
415 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
416 					sd_offset,
417 					SOF_HDA_SD_CTL_DMA_START |
418 					SOF_HDA_CL_DMA_SD_INT_MASK,
419 					SOF_HDA_SD_CTL_DMA_START |
420 					SOF_HDA_CL_DMA_SD_INT_MASK);
421 
422 		ret = snd_sof_dsp_read_poll_timeout(sdev,
423 					HDA_DSP_HDA_BAR,
424 					sd_offset, run,
					((run & dma_start) == dma_start),
426 					HDA_DSP_REG_POLL_INTERVAL_US,
427 					HDA_DSP_STREAM_RUN_TIMEOUT);
428 
429 		if (ret >= 0)
430 			hstream->running = true;
431 
432 		break;
433 	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
434 		if (!sdev->dspless_mode_selected)
435 			break;
436 		fallthrough;
437 	case SNDRV_PCM_TRIGGER_SUSPEND:
438 	case SNDRV_PCM_TRIGGER_STOP:
439 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
440 					sd_offset,
441 					SOF_HDA_SD_CTL_DMA_START |
442 					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);
443 
444 		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
445 						sd_offset, run,
						!(run & dma_start),
447 						HDA_DSP_REG_POLL_INTERVAL_US,
448 						HDA_DSP_STREAM_RUN_TIMEOUT);
449 
450 		if (ret >= 0) {
451 			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
452 					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
453 					  SOF_HDA_CL_DMA_SD_INT_MASK);
454 
455 			hstream->running = false;
456 			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
457 						SOF_HDA_INTCTL,
458 						1 << hstream->index, 0x0);
459 		}
460 		break;
461 	default:
462 		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
463 		return -EINVAL;
464 	}
465 
466 	if (ret < 0) {
467 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
468 
469 		dev_err(sdev->dev,
470 			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
471 			__func__, cmd, stream_name ? stream_name : "unknown stream");
472 		kfree(stream_name);
473 	}
474 
475 	return ret;
476 }
477 
478 /* minimal recommended programming for ICCMAX stream */
479 int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
480 				    struct snd_dma_buffer *dmab,
481 				    struct snd_pcm_hw_params *params)
482 {
	struct hdac_stream *hstream;
	int sd_offset;
	u32 mask;
	int ret;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	/* only dereference the stream after the NULL checks above */
	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = BIT(hstream->index);

498 	if (hstream->posbuf)
499 		*hstream->posbuf = 0;
500 
501 	/* reset BDL address */
502 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
503 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
504 			  0x0);
505 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
506 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
507 			  0x0);
508 
509 	hstream->frags = 0;
510 
511 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
512 	if (ret < 0) {
513 		dev_err(sdev->dev, "error: set up of BDL failed\n");
514 		return ret;
515 	}
516 
517 	/* program BDL address */
518 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
519 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
520 			  (u32)hstream->bdl.addr);
521 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
522 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
523 			  upper_32_bits(hstream->bdl.addr));
524 
525 	/* program cyclic buffer length */
526 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
527 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
528 			  hstream->bufsize);
529 
530 	/* program last valid index */
531 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
532 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
533 				0xffff, (hstream->frags - 1));
534 
535 	/* decouple host and link DMA, enable DSP features */
536 	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
537 				mask, mask);
538 
539 	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
540 	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
541 			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);
542 
543 	/* start DMA */
544 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
545 				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);
546 
547 	return 0;
548 }
549 
550 /*
 * prepare the common hdac register settings, for both the code loader
 * and normal streams.
553  */
554 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
555 			     struct hdac_ext_stream *hext_stream,
556 			     struct snd_dma_buffer *dmab,
557 			     struct snd_pcm_hw_params *params)
558 {
559 	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
560 	struct hdac_bus *bus = sof_to_bus(sdev);
561 	struct hdac_stream *hstream;
562 	int sd_offset, ret;
563 	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
564 	u32 mask;
565 	u32 run;
566 
567 	if (!hext_stream) {
568 		dev_err(sdev->dev, "error: no stream available\n");
569 		return -ENODEV;
570 	}
571 
572 	if (!dmab) {
573 		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
574 		return -ENODEV;
575 	}
576 
577 	hstream = &hext_stream->hstream;
578 	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
579 	mask = BIT(hstream->index);
580 
581 	/* decouple host and link DMA if the DSP is used */
582 	if (!sdev->dspless_mode_selected)
583 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
584 					mask, mask);
585 
586 	/* clear stream status */
587 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
588 				SOF_HDA_CL_DMA_SD_INT_MASK |
589 				SOF_HDA_SD_CTL_DMA_START, 0);
590 
591 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
592 					    sd_offset, run,
593 					    !(run & dma_start),
594 					    HDA_DSP_REG_POLL_INTERVAL_US,
595 					    HDA_DSP_STREAM_RUN_TIMEOUT);
596 
597 	if (ret < 0) {
598 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
599 
600 		dev_err(sdev->dev,
601 			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
602 			__func__, stream_name ? stream_name : "unknown stream");
603 		kfree(stream_name);
604 		return ret;
605 	}
606 
607 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
608 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
609 				SOF_HDA_CL_DMA_SD_INT_MASK,
610 				SOF_HDA_CL_DMA_SD_INT_MASK);
611 
612 	/* stream reset */
613 	ret = hda_dsp_stream_reset(sdev, hstream);
614 	if (ret < 0)
615 		return ret;
616 
617 	if (hstream->posbuf)
618 		*hstream->posbuf = 0;
619 
620 	/* reset BDL address */
621 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
622 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
623 			  0x0);
624 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
625 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
626 			  0x0);
627 
628 	/* clear stream status */
629 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
630 				SOF_HDA_CL_DMA_SD_INT_MASK |
631 				SOF_HDA_SD_CTL_DMA_START, 0);
632 
633 	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
634 					    sd_offset, run,
635 					    !(run & dma_start),
636 					    HDA_DSP_REG_POLL_INTERVAL_US,
637 					    HDA_DSP_STREAM_RUN_TIMEOUT);
638 
639 	if (ret < 0) {
640 		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);
641 
642 		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
644 			__func__, stream_name ? stream_name : "unknown stream");
645 		kfree(stream_name);
646 		return ret;
647 	}
648 
649 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
650 				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
651 				SOF_HDA_CL_DMA_SD_INT_MASK,
652 				SOF_HDA_CL_DMA_SD_INT_MASK);
653 
654 	hstream->frags = 0;
655 
656 	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
657 	if (ret < 0) {
658 		dev_err(sdev->dev, "error: set up of BDL failed\n");
659 		return ret;
660 	}
661 
662 	/* program stream tag to set up stream descriptor for DMA */
663 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
664 				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
665 				hstream->stream_tag <<
666 				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);
667 
668 	/* program cyclic buffer length */
669 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
670 			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
671 			  hstream->bufsize);
672 
	/*
	 * Recommended hardware programming sequence for the HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms:
	 *
	 * 1. Put the DMA into coupled mode by clearing the PPCTL.PROCEN bit
	 *    for the corresponding stream index before writing the format
	 *    to the SDxFMT register.
	 * 2. Write SDxFMT.
	 * 3. Set the PPCTL.PROCEN bit for the corresponding stream index to
	 *    re-enable decoupled mode.
	 */
684 
685 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
686 		/* couple host and link DMA, disable DSP features */
687 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
688 					mask, 0);
689 
690 	/* program stream format */
691 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
692 				sd_offset +
693 				SOF_HDA_ADSP_REG_SD_FORMAT,
694 				0xffff, hstream->format_val);
695 
696 	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
697 		/* decouple host and link DMA, enable DSP features */
698 		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
699 					mask, mask);
700 
701 	/* program last valid index */
702 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
703 				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
704 				0xffff, (hstream->frags - 1));
705 
706 	/* program BDL address */
707 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
708 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
709 			  (u32)hstream->bdl.addr);
710 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
711 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
712 			  upper_32_bits(hstream->bdl.addr));
713 
714 	/* enable position buffer, if needed */
715 	if (bus->use_posbuf && bus->posbuf.addr &&
716 	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
717 	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
718 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
719 				  upper_32_bits(bus->posbuf.addr));
720 		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
721 				  (u32)bus->posbuf.addr |
722 				  SOF_HDA_ADSP_DPLBASE_ENABLE);
723 	}
724 
725 	/* set interrupt enable bits */
726 	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
727 				SOF_HDA_CL_DMA_SD_INT_MASK,
728 				SOF_HDA_CL_DMA_SD_INT_MASK);
729 
730 	/* read FIFO size */
731 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
732 		hstream->fifo_size =
733 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
734 					 sd_offset +
735 					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
736 		hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK;
737 		hstream->fifo_size += 1;
738 	} else {
739 		hstream->fifo_size = 0;
740 	}
741 
742 	return ret;
743 }
744 
745 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
746 			   struct snd_pcm_substream *substream)
747 {
748 	struct hdac_stream *hstream = substream->runtime->private_data;
749 	struct hdac_ext_stream *hext_stream = container_of(hstream,
750 							 struct hdac_ext_stream,
751 							 hstream);
752 	int ret;
753 
754 	ret = hda_dsp_stream_reset(sdev, hstream);
755 	if (ret < 0)
756 		return ret;
757 
758 	if (!sdev->dspless_mode_selected) {
759 		struct hdac_bus *bus = sof_to_bus(sdev);
760 		u32 mask = BIT(hstream->index);
761 
762 		guard(spinlock_irq)(&bus->reg_lock);
763 
764 		/* couple host and link DMA if link DMA channel is idle */
765 		if (!hext_stream->link_locked)
766 			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
767 						SOF_HDA_REG_PP_PPCTL, mask, 0);
768 	}
769 
770 	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);
771 
772 	hstream->substream = NULL;
773 
774 	return 0;
775 }
776 EXPORT_SYMBOL_NS(hda_dsp_stream_hw_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
777 
778 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
779 {
780 	struct hdac_bus *bus = sof_to_bus(sdev);
781 	bool ret = false;
782 	u32 status;
783 
	/* This function can be called from the IRQ thread, so take reg_lock with IRQs disabled */
785 	guard(spinlock_irq)(&bus->reg_lock);
786 
787 	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
788 
789 	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);
790 
	/* if the register is inaccessible, ignore it */
792 	if (status != 0xffffffff)
793 		ret = true;
794 
795 	return ret;
796 }
797 EXPORT_SYMBOL_NS(hda_dsp_check_stream_irq, "SND_SOC_SOF_INTEL_HDA_COMMON");
798 
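/*
 * Accumulate the bytes moved since the previous update while handling ring
 * buffer wrap-around. Example with hypothetical values: for a 64 KiB buffer,
 * a prev_pos of 60 KiB and a new hardware position of 4 KiB, the amount
 * transferred is (64 KiB - 60 KiB) + 4 KiB = 8 KiB, which is added to the
 * monotonically increasing curr_pos.
 */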
799 static void
800 hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
801 {
802 	u64 buffer_size = hstream->bufsize;
803 	u64 prev_pos, pos, num_bytes;
804 
805 	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
806 	pos = hda_dsp_stream_get_position(hstream, direction, false);
807 
808 	if (pos < prev_pos)
809 		num_bytes = (buffer_size - prev_pos) +  pos;
810 	else
811 		num_bytes = pos - prev_pos;
812 
813 	hstream->curr_pos += num_bytes;
814 }
815 
816 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
817 {
818 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
819 	struct hdac_stream *s;
820 	bool active = false;
821 	u32 sd_status;
822 
823 	list_for_each_entry(s, &bus->stream_list, list) {
824 		if (status & BIT(s->index) && s->opened) {
825 			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);
826 
827 			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);
828 
829 			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);
830 
831 			active = true;
832 			if (!s->running)
833 				continue;
834 			if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
835 				continue;
836 			if (!s->substream && !s->cstream) {
				/*
				 * When no substream is found, the DMA may be used for code
				 * loading or data transfers which rely on wait_for_completion()
				 */
841 				struct sof_intel_hda_stream *hda_stream;
842 				struct hdac_ext_stream *hext_stream;
843 
844 				hext_stream = stream_to_hdac_ext_stream(s);
845 				hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
846 							  hext_stream);
847 
848 				complete(&hda_stream->ioc);
849 				continue;
850 			}
851 
852 			/* Inform ALSA only if the IPC position is not used */
853 			if (s->substream && sof_hda->no_ipc_position) {
854 				snd_sof_pcm_period_elapsed(s->substream);
855 			} else if (s->cstream) {
856 				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
857 				snd_compr_fragment_elapsed(s->cstream);
858 			}
859 		}
860 	}
861 
862 	return active;
863 }
864 
865 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
866 {
867 	struct snd_sof_dev *sdev = context;
868 	struct hdac_bus *bus = sof_to_bus(sdev);
869 	bool active;
870 	u32 status;
871 	int i;
872 
873 	/*
874 	 * Loop 10 times to handle missed interrupts caused by
875 	 * unsolicited responses from the codec
876 	 */
877 	for (i = 0, active = true; i < 10 && active; i++) {
878 		guard(spinlock_irq)(&bus->reg_lock);
879 
880 		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);
881 
882 		/* check streams */
883 		active = hda_dsp_stream_check(bus, status);
884 
		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN)
			active |= hda_codec_check_rirb_status(sdev);
889 	}
890 
891 	return IRQ_HANDLED;
892 }
893 EXPORT_SYMBOL_NS(hda_dsp_stream_threaded_handler, "SND_SOC_SOF_INTEL_HDA_COMMON");
894 
895 int hda_dsp_stream_init(struct snd_sof_dev *sdev)
896 {
897 	struct hdac_bus *bus = sof_to_bus(sdev);
898 	struct hdac_ext_stream *hext_stream;
899 	struct hdac_stream *hstream;
900 	struct pci_dev *pci = to_pci_dev(sdev->dev);
901 	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
902 	int sd_offset;
903 	int i, num_playback, num_capture, num_total, ret;
904 	u32 gcap;
905 
906 	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
907 	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);
908 
909 	/* get stream count from GCAP */
910 	num_capture = (gcap >> 8) & 0x0f;
911 	num_playback = (gcap >> 12) & 0x0f;
912 	num_total = num_playback + num_capture;
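	/*
	 * Example with a hypothetical GCAP value of 0x9701: bits 11:8 encode
	 * 7 capture streams and bits 15:12 encode 9 playback streams, i.e.
	 * 16 streams in total.
	 */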
913 
914 	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
915 		num_playback, num_capture);
916 
917 	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
918 		dev_err(sdev->dev, "error: too many playback streams %d\n",
919 			num_playback);
920 		return -EINVAL;
921 	}
922 
923 	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
924 		dev_err(sdev->dev, "error: too many capture streams %d\n",
925 			num_capture);
926 		return -EINVAL;
927 	}
928 
929 	/*
930 	 * mem alloc for the position buffer
931 	 * TODO: check position buffer update
932 	 */
933 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
934 				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
935 				  &bus->posbuf);
936 	if (ret < 0) {
937 		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
938 		return -ENOMEM;
939 	}
940 
941 	/*
942 	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
943 	 * HDAudio codecs
944 	 */
945 	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
946 				  PAGE_SIZE, &bus->rb);
947 	if (ret < 0) {
948 		dev_err(sdev->dev, "error: RB alloc failed\n");
949 		return -ENOMEM;
950 	}
951 
952 	/* create capture and playback streams */
953 	for (i = 0; i < num_total; i++) {
954 		struct sof_intel_hda_stream *hda_stream;
955 
956 		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
957 					  GFP_KERNEL);
958 		if (!hda_stream)
959 			return -ENOMEM;
960 
961 		hda_stream->sdev = sdev;
962 		init_completion(&hda_stream->ioc);
963 
964 		hext_stream = &hda_stream->hext_stream;
965 
966 		if (sdev->bar[HDA_DSP_PP_BAR]) {
967 			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
968 				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;
969 
970 			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
971 				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
972 				SOF_HDA_PPLC_INTERVAL * i;
973 		}
974 
975 		hstream = &hext_stream->hstream;
976 
977 		/* do we support SPIB */
978 		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
979 			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
980 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
981 				SOF_HDA_SPIB_SPIB;
982 
983 			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
984 				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
985 				SOF_HDA_SPIB_MAXFIFO;
986 		}
987 
988 		hstream->bus = bus;
989 		hstream->sd_int_sta_mask = 1 << i;
990 		hstream->index = i;
991 		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
992 		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
993 		hstream->opened = false;
994 		hstream->running = false;
995 
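		/*
		 * Stream tag layout, for illustration: with 7 capture and 9
		 * playback streams, indices 0..6 are capture with stream tags
		 * 1..7 and indices 7..15 are playback with stream tags 1..9.
		 */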
996 		if (i < num_capture) {
997 			hstream->stream_tag = i + 1;
998 			hstream->direction = SNDRV_PCM_STREAM_CAPTURE;
999 		} else {
1000 			hstream->stream_tag = i - num_capture + 1;
1001 			hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;
1002 		}
1003 
1004 		/* mem alloc for stream BDL */
1005 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
1006 					  HDA_DSP_BDL_SIZE, &hstream->bdl);
1007 		if (ret < 0) {
1008 			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
1009 			return -ENOMEM;
1010 		}
1011 
1012 		hstream->posbuf = (__le32 *)(bus->posbuf.area +
1013 			(hstream->index) * 8);
1014 
1015 		list_add_tail(&hstream->list, &bus->stream_list);
1016 	}
1017 
1018 	/* store total stream count (playback + capture) from GCAP */
1019 	sof_hda->stream_max = num_total;
1020 
1021 	/* store stream count from GCAP required for CHAIN_DMA */
1022 	if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) {
1023 		struct sof_ipc4_fw_data *ipc4_data = sdev->private;
1024 
1025 		ipc4_data->num_playback_streams = num_playback;
1026 		ipc4_data->num_capture_streams = num_capture;
1027 	}
1028 
1029 	return 0;
1030 }
1031 EXPORT_SYMBOL_NS(hda_dsp_stream_init, "SND_SOC_SOF_INTEL_HDA_COMMON");
1032 
1033 void hda_dsp_stream_free(struct snd_sof_dev *sdev)
1034 {
1035 	struct hdac_bus *bus = sof_to_bus(sdev);
1036 	struct hdac_stream *s, *_s;
1037 	struct hdac_ext_stream *hext_stream;
1038 	struct sof_intel_hda_stream *hda_stream;
1039 
1040 	/* free position buffer */
1041 	if (bus->posbuf.area)
1042 		snd_dma_free_pages(&bus->posbuf);
1043 
1044 	/* free CORB/RIRB buffer - only used for HDaudio codecs */
1045 	if (bus->rb.area)
1046 		snd_dma_free_pages(&bus->rb);
1047 
1048 	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
1049 		/* TODO: decouple */
1050 
1051 		/* free bdl buffer */
1052 		if (s->bdl.area)
1053 			snd_dma_free_pages(&s->bdl);
1054 		list_del(&s->list);
1055 		hext_stream = stream_to_hdac_ext_stream(s);
1056 		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
1057 					  hext_stream);
1058 		devm_kfree(sdev->dev, hda_stream);
1059 	}
1060 }
1061 EXPORT_SYMBOL_NS(hda_dsp_stream_free, "SND_SOC_SOF_INTEL_HDA_COMMON");
1062 
1063 snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
1064 					      int direction, bool can_sleep)
1065 {
1066 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
1067 	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
1068 	struct snd_sof_dev *sdev = hda_stream->sdev;
1069 	snd_pcm_uframes_t pos;
1070 
1071 	switch (sof_hda_position_quirk) {
1072 	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
1073 		/*
1074 		 * This legacy code, inherited from the Skylake driver,
1075 		 * mixes DPIB registers and DPIB DDR updates and
1076 		 * does not seem to follow any known hardware recommendations.
		 * It's not clear e.g. why there is a different flow for capture
		 * and playback; the only information that matters is which
		 * traffic class is used, and since all SOF-enabled platforms
		 * support only VC0, the workaround was likely unnecessary and
		 * quite possibly wrong.
1082 		 */
1083 
		/* DPIB/posbuf position mode:
		 * For playback, use the DPIB register from HDA space, which
		 * reflects the actual data transferred.
		 * For capture, use the position buffer for the pointer, as the
		 * DPIB is not accurate enough: its update may complete before
		 * the data has been written to DDR.
		 */
1091 		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
1092 			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
1093 					       AZX_REG_VS_SDXDPIB_XBASE +
1094 					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
1095 						hstream->index));
1096 		} else {
			/*
			 * For capture streams, additional workarounds are needed
			 * to fix incorrect position reporting:
			 *
			 * 1. Wait at least 20us after the interrupt (IOC) is
			 * generated before reading the position buffer, to make
			 * sure the position update happens on a frame boundary,
			 * i.e. 20.833us for 48kHz.
			 * 2. Perform a dummy read of the DPIB register to flush
			 * the DMA position value.
			 * 3. Read the DMA position from the posbuf. The readback
			 * value should now be >= the period boundary.
			 */
1109 			if (can_sleep)
1110 				usleep_range(20, 21);
1111 
1112 			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
1113 					 AZX_REG_VS_SDXDPIB_XBASE +
1114 					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
1115 					  hstream->index));
1116 			pos = snd_hdac_stream_get_pos_posbuf(hstream);
1117 		}
1118 		break;
1119 	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * This is the recommended option when VC1 traffic is disabled.
		 */
1123 		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
1124 				       AZX_REG_VS_SDXDPIB_XBASE +
1125 				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
1126 					hstream->index));
1127 		break;
1128 	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
1129 		/*
1130 		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed on SOF platforms, it's kept for
		 * consistency and debugging.
1133 		 */
1134 		pos = snd_hdac_stream_get_pos_posbuf(hstream);
1135 		break;
1136 	default:
1137 		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
1138 			     sof_hda_position_quirk);
1139 		pos = 0;
1140 		break;
1141 	}
1142 
1143 	if (pos >= hstream->bufsize)
1144 		pos = 0;
1145 
1146 	return pos;
1147 }
1148 EXPORT_SYMBOL_NS(hda_dsp_stream_get_position, "SND_SOC_SOF_INTEL_HDA_COMMON");
1149 
1150 #define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l))
1151 
1152 /**
1153  * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream
1154  * @sdev: SOF device
1155  * @component: ASoC component
1156  * @substream: PCM substream
1157  *
 * Returns the Linear Link Position value, compensated with any saved LLP offset
1159  */
1160 u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev,
1161 			   struct snd_soc_component *component,
1162 			   struct snd_pcm_substream *substream)
1163 {
1164 	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
1165 	struct snd_soc_pcm_runtime *be_rtd = NULL;
1166 	struct hdac_ext_stream *hext_stream;
1167 	struct snd_soc_dai *cpu_dai;
1168 	struct snd_soc_dpcm *dpcm;
1169 	u32 llp_l, llp_u;
1170 
1171 	/*
1172 	 * The LLP needs to be read from the Link DMA used for this FE as it is
1173 	 * allowed to use any combination of Link and Host channels
1174 	 */
1175 	for_each_dpcm_be(rtd, substream->stream, dpcm) {
1176 		if (dpcm->fe != rtd)
1177 			continue;
1178 
1179 		be_rtd = dpcm->be;
1180 	}
1181 
1182 	if (!be_rtd)
1183 		return 0;
1184 
1185 	cpu_dai = snd_soc_rtd_to_cpu(be_rtd, 0);
1186 	if (!cpu_dai)
1187 		return 0;
1188 
1189 	hext_stream = snd_soc_dai_get_dma_data(cpu_dai, substream);
1190 	if (!hext_stream)
1191 		return 0;
1192 
1193 	/*
	 * The pplc_addr has been calculated during probe in
1195 	 * hda_dsp_stream_init():
1196 	 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1197 	 *	       SOF_HDA_PPLC_BASE +
1198 	 *	       SOF_HDA_PPLC_MULTI * total_stream +
1199 	 *	       SOF_HDA_PPLC_INTERVAL * stream_index
1200 	 *
1201 	 * Use this pre-calculated address to avoid repeated re-calculation.
1202 	 */
1203 	llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL);
1204 	llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU);
1205 
1206 	/* Compensate the LLP counter with the saved offset */
1207 	if (hext_stream->pplcllpl || hext_stream->pplcllpu)
1208 		return merge_u64(llp_u, llp_l) -
1209 		       merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl);
1210 
1211 	return merge_u64(llp_u, llp_l);
1212 }
1213 EXPORT_SYMBOL_NS(hda_dsp_get_stream_llp, "SND_SOC_SOF_INTEL_HDA_COMMON");
1214 
1215 /**
1216  * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream
1217  * @sdev: SOF device
1218  * @component: ASoC component
1219  * @substream: PCM substream
1220  *
 * Returns the raw Linear DMA Position value
1222  */
1223 u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev,
1224 			   struct snd_soc_component *component,
1225 			   struct snd_pcm_substream *substream)
1226 {
1227 	struct hdac_stream *hstream = substream->runtime->private_data;
1228 	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
1229 	u32 ldp_l, ldp_u;
1230 
1231 	/*
	 * The pphc_addr has been calculated during probe in
1233 	 * hda_dsp_stream_init():
1234 	 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
1235 	 *	       SOF_HDA_PPHC_BASE +
1236 	 *	       SOF_HDA_PPHC_INTERVAL * stream_index
1237 	 *
1238 	 * Use this pre-calculated address to avoid repeated re-calculation.
1239 	 */
1240 	ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL);
1241 	ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU);
1242 
1243 	return ((u64)ldp_u << 32) | ldp_l;
1244 }
1245 EXPORT_SYMBOL_NS(hda_dsp_get_stream_ldp, "SND_SOC_SOF_INTEL_HDA_COMMON");
1246 
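/*
 * Helper for non-PCM data transfers (e.g. firmware/code loading): reserve a
 * host stream (optionally together with its link DMA channel), allocate the
 * SG buffer unless a persistent one already exists, program the stream via
 * hda_dsp_stream_hw_params() or hda_dsp_iccmax_stream_hw_params() and, in
 * the non-ICCMAX case, arm SPIB with the full transfer size.
 */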
1247 struct hdac_ext_stream *
1248 hda_data_stream_prepare(struct device *dev, unsigned int format, unsigned int size,
1249 			struct snd_dma_buffer *dmab, bool persistent_buffer, int direction,
1250 			bool is_iccmax, bool pair)
1251 {
1252 	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
1253 	struct hdac_ext_stream *hext_stream;
1254 	struct hdac_stream *hstream;
1255 	int ret;
1256 
1257 	if (pair)
1258 		hext_stream = hda_dsp_stream_pair_get(sdev, direction, 0);
1259 	else
1260 		hext_stream = hda_dsp_stream_get(sdev, direction, 0);
1261 
1262 	if (!hext_stream) {
1263 		dev_err(sdev->dev, "%s: no stream available\n", __func__);
1264 		return ERR_PTR(-ENODEV);
1265 	}
1266 	hstream = &hext_stream->hstream;
1267 	hstream->substream = NULL;
1268 
1269 	/*
1270 	 * Allocate DMA buffer if it is temporary or if the buffer is intended
1271 	 * to be persistent but not yet allocated.
	 * We cannot rely solely on !dmab->area as the caller might use a
	 * struct on the stack (when it is temporary) without clearing it to 0.
1274 	 */
1275 	if (!persistent_buffer || !dmab->area) {
1276 		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV_SG, dev, size, dmab);
1277 		if (ret < 0) {
1278 			dev_err(sdev->dev, "%s: memory alloc failed: %d\n",
1279 				__func__, ret);
1280 			goto out_put;
1281 		}
1282 	}
1283 
1284 	hstream->period_bytes = 0; /* initialize period_bytes */
1285 	hstream->format_val = format;
1286 	hstream->bufsize = size;
1287 
1288 	if (is_iccmax) {
1289 		ret = hda_dsp_iccmax_stream_hw_params(sdev, hext_stream, dmab, NULL);
1290 		if (ret < 0) {
1291 			dev_err(sdev->dev, "%s: iccmax stream prepare failed: %d\n",
1292 				__func__, ret);
1293 			goto out_free;
1294 		}
1295 	} else {
1296 		ret = hda_dsp_stream_hw_params(sdev, hext_stream, dmab, NULL);
1297 		if (ret < 0) {
1298 			dev_err(sdev->dev, "%s: hdac prepare failed: %d\n", __func__, ret);
1299 			goto out_free;
1300 		}
1301 		hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_ENABLE, size);
1302 	}
1303 
1304 	return hext_stream;
1305 
1306 out_free:
1307 	snd_dma_free_pages(dmab);
1308 	dmab->area = NULL;
1309 	dmab->bytes = 0;
1310 	hstream->bufsize = 0;
1311 	hstream->format_val = 0;
1312 out_put:
1313 	if (pair)
1314 		hda_dsp_stream_pair_put(sdev, direction, hstream->stream_tag);
1315 	else
1316 		hda_dsp_stream_put(sdev, direction, hstream->stream_tag);
1317 	return ERR_PTR(ret);
1318 }
1319 EXPORT_SYMBOL_NS(hda_data_stream_prepare, "SND_SOC_SOF_INTEL_HDA_COMMON");
1320 
1321 int hda_data_stream_cleanup(struct device *dev, struct snd_dma_buffer *dmab,
1322 			    bool persistent_buffer, struct hdac_ext_stream *hext_stream, bool pair)
1323 {
	struct snd_sof_dev *sdev = dev_get_drvdata(dev);
1325 	struct hdac_stream *hstream = hdac_stream(hext_stream);
1326 	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
1327 	int ret = 0;
1328 
1329 	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
1330 		ret = hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);
1331 	else
1332 		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
1333 					SOF_HDA_SD_CTL_DMA_START, 0);
1334 
1335 	if (pair)
1336 		hda_dsp_stream_pair_put(sdev, hstream->direction, hstream->stream_tag);
1337 	else
1338 		hda_dsp_stream_put(sdev, hstream->direction, hstream->stream_tag);
1339 
	hstream->running = false;
1341 	hstream->substream = NULL;
1342 
1343 	/* reset BDL address */
1344 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
1345 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 0);
1346 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
1347 			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 0);
1348 
1349 	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, sd_offset, 0);
1350 
1351 	if (!persistent_buffer) {
1352 		snd_dma_free_pages(dmab);
1353 		dmab->area = NULL;
1354 		dmab->bytes = 0;
1355 		hstream->bufsize = 0;
1356 		hstream->format_val = 0;
1357 	}
1358 
1359 	return ret;
1360 }
1361 EXPORT_SYMBOL_NS(hda_data_stream_cleanup, "SND_SOC_SOF_INTEL_HDA_COMMON");
1362