1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause) 2 // 3 // This file is provided under a dual BSD/GPLv2 license. When using or 4 // redistributing this file, you may do so under either license. 5 // 6 // Copyright(c) 2018 Intel Corporation 7 // 8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com> 9 // Ranjani Sridharan <ranjani.sridharan@linux.intel.com> 10 // Rander Wang <rander.wang@intel.com> 11 // Keyon Jie <yang.jie@linux.intel.com> 12 // 13 14 /* 15 * Hardware interface for generic Intel audio DSP HDA IP 16 */ 17 18 #include <sound/hdaudio_ext.h> 19 #include <sound/hda_register.h> 20 #include <sound/sof.h> 21 #include <trace/events/sof_intel.h> 22 #include "../ops.h" 23 #include "../sof-audio.h" 24 #include "../ipc4-priv.h" 25 #include "hda.h" 26 27 int sof_hda_position_quirk = SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS; 28 module_param_named(position_quirk, sof_hda_position_quirk, int, 0444); 29 MODULE_PARM_DESC(position_quirk, "SOF HDaudio position quirk"); 30 EXPORT_SYMBOL_NS(sof_hda_position_quirk, SND_SOC_SOF_INTEL_HDA_COMMON); 31 32 #define HDA_LTRP_GB_VALUE_US 95 33 34 static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream) 35 { 36 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) 37 return "Playback"; 38 else 39 return "Capture"; 40 } 41 42 static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream) 43 { 44 struct snd_soc_pcm_runtime *rtd; 45 46 if (hstream->substream) 47 rtd = snd_soc_substream_to_rtd(hstream->substream); 48 else if (hstream->cstream) 49 rtd = hstream->cstream->private_data; 50 else 51 /* Non audio DMA user, like dma-trace */ 52 return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)", 53 hda_hstream_direction_str(hstream), 54 hstream->stream_tag); 55 56 return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)", 57 rtd->dai_link->name, hda_hstream_direction_str(hstream), 58 hstream->stream_tag); 59 } 60 61 /* 62 * set up one of BDL entries for a stream 63 */ 64 static int hda_setup_bdle(struct snd_sof_dev *sdev, 65 struct snd_dma_buffer *dmab, 66 struct hdac_stream *hstream, 67 struct sof_intel_dsp_bdl **bdlp, 68 int offset, int size, int ioc) 69 { 70 struct hdac_bus *bus = sof_to_bus(sdev); 71 struct sof_intel_dsp_bdl *bdl = *bdlp; 72 73 while (size > 0) { 74 dma_addr_t addr; 75 int chunk; 76 77 if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) { 78 dev_err(sdev->dev, "error: stream frags exceeded\n"); 79 return -EINVAL; 80 } 81 82 addr = snd_sgbuf_get_addr(dmab, offset); 83 /* program BDL addr */ 84 bdl->addr_l = cpu_to_le32(lower_32_bits(addr)); 85 bdl->addr_h = cpu_to_le32(upper_32_bits(addr)); 86 /* program BDL size */ 87 chunk = snd_sgbuf_get_chunk_size(dmab, offset, size); 88 /* one BDLE should not cross 4K boundary */ 89 if (bus->align_bdle_4k) { 90 u32 remain = 0x1000 - (offset & 0xfff); 91 92 if (chunk > remain) 93 chunk = remain; 94 } 95 bdl->size = cpu_to_le32(chunk); 96 /* only program IOC when the whole segment is processed */ 97 size -= chunk; 98 bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01); 99 bdl++; 100 hstream->frags++; 101 offset += chunk; 102 } 103 104 *bdlp = bdl; 105 return offset; 106 } 107 108 /* 109 * set up Buffer Descriptor List (BDL) for host memory transfer 110 * BDL describes the location of the individual buffers and is little endian. 111 */ 112 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev, 113 struct snd_dma_buffer *dmab, 114 struct hdac_stream *hstream) 115 { 116 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 117 struct sof_intel_dsp_bdl *bdl; 118 int i, offset, period_bytes, periods; 119 int remain, ioc; 120 121 period_bytes = hstream->period_bytes; 122 dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes); 123 if (!period_bytes) 124 period_bytes = hstream->bufsize; 125 126 periods = hstream->bufsize / period_bytes; 127 128 dev_dbg(sdev->dev, "periods:%d\n", periods); 129 130 remain = hstream->bufsize % period_bytes; 131 if (remain) 132 periods++; 133 134 /* program the initial BDL entries */ 135 bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area; 136 offset = 0; 137 hstream->frags = 0; 138 139 /* 140 * set IOC if don't use position IPC 141 * and period_wakeup needed. 142 */ 143 ioc = hda->no_ipc_position ? 144 !hstream->no_period_wakeup : 0; 145 146 for (i = 0; i < periods; i++) { 147 if (i == (periods - 1) && remain) 148 /* set the last small entry */ 149 offset = hda_setup_bdle(sdev, dmab, 150 hstream, &bdl, offset, 151 remain, 0); 152 else 153 offset = hda_setup_bdle(sdev, dmab, 154 hstream, &bdl, offset, 155 period_bytes, ioc); 156 } 157 158 return offset; 159 } 160 161 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev, 162 struct hdac_ext_stream *hext_stream, 163 int enable, u32 size) 164 { 165 struct hdac_stream *hstream = &hext_stream->hstream; 166 u32 mask; 167 168 if (!sdev->bar[HDA_DSP_SPIB_BAR]) { 169 dev_err(sdev->dev, "error: address of spib capability is NULL\n"); 170 return -EINVAL; 171 } 172 173 mask = (1 << hstream->index); 174 175 /* enable/disable SPIB for the stream */ 176 snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR, 177 SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask, 178 enable << hstream->index); 179 180 /* set the SPIB value */ 181 sof_io_write(sdev, hstream->spib_addr, size); 182 183 return 0; 184 } 185 186 /* get next unused stream */ 187 struct hdac_ext_stream * 188 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags) 189 { 190 const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata); 191 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 192 struct hdac_bus *bus = sof_to_bus(sdev); 193 struct sof_intel_hda_stream *hda_stream; 194 struct hdac_ext_stream *hext_stream = NULL; 195 struct hdac_stream *s; 196 197 spin_lock_irq(&bus->reg_lock); 198 199 /* get an unused stream */ 200 list_for_each_entry(s, &bus->stream_list, list) { 201 if (s->direction == direction && !s->opened) { 202 hext_stream = stream_to_hdac_ext_stream(s); 203 hda_stream = container_of(hext_stream, 204 struct sof_intel_hda_stream, 205 hext_stream); 206 /* check if the host DMA channel is reserved */ 207 if (hda_stream->host_reserved) 208 continue; 209 210 s->opened = true; 211 break; 212 } 213 } 214 215 spin_unlock_irq(&bus->reg_lock); 216 217 /* stream found ? */ 218 if (!hext_stream) { 219 dev_err(sdev->dev, "error: no free %s streams\n", 220 direction == SNDRV_PCM_STREAM_PLAYBACK ? 221 "playback" : "capture"); 222 return hext_stream; 223 } 224 225 hda_stream->flags = flags; 226 227 /* 228 * Prevent DMI Link L1 entry for streams that don't support it. 229 * Workaround to address a known issue with host DMA that results 230 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP. 231 */ 232 if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && 233 !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) { 234 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 235 HDA_VS_INTEL_EM2, 236 HDA_VS_INTEL_EM2_L1SEN, 0); 237 hda->l1_disabled = true; 238 } 239 240 return hext_stream; 241 } 242 243 /* free a stream */ 244 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag) 245 { 246 const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata); 247 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 248 struct hdac_bus *bus = sof_to_bus(sdev); 249 struct sof_intel_hda_stream *hda_stream; 250 struct hdac_ext_stream *hext_stream; 251 struct hdac_stream *s; 252 bool dmi_l1_enable = true; 253 bool found = false; 254 255 spin_lock_irq(&bus->reg_lock); 256 257 /* 258 * close stream matching the stream tag and check if there are any open streams 259 * that are DMI L1 incompatible. 260 */ 261 list_for_each_entry(s, &bus->stream_list, list) { 262 hext_stream = stream_to_hdac_ext_stream(s); 263 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream); 264 265 if (!s->opened) 266 continue; 267 268 if (s->direction == direction && s->stream_tag == stream_tag) { 269 s->opened = false; 270 found = true; 271 } else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) { 272 dmi_l1_enable = false; 273 } 274 } 275 276 spin_unlock_irq(&bus->reg_lock); 277 278 /* Enable DMI L1 if permitted */ 279 if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) { 280 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2, 281 HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN); 282 hda->l1_disabled = false; 283 } 284 285 if (!found) { 286 dev_err(sdev->dev, "%s: stream_tag %d not opened!\n", 287 __func__, stream_tag); 288 return -ENODEV; 289 } 290 291 return 0; 292 } 293 294 static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream) 295 { 296 int sd_offset = SOF_STREAM_SD_OFFSET(hstream); 297 int timeout = HDA_DSP_STREAM_RESET_TIMEOUT; 298 u32 val; 299 300 /* enter stream reset */ 301 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 302 SOF_STREAM_SD_OFFSET_CRST); 303 do { 304 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset); 305 if (val & SOF_STREAM_SD_OFFSET_CRST) 306 break; 307 } while (--timeout); 308 if (timeout == 0) { 309 dev_err(sdev->dev, "timeout waiting for stream reset\n"); 310 return -ETIMEDOUT; 311 } 312 313 timeout = HDA_DSP_STREAM_RESET_TIMEOUT; 314 315 /* exit stream reset and wait to read a zero before reading any other register */ 316 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0); 317 318 /* wait for hardware to report that stream is out of reset */ 319 udelay(3); 320 do { 321 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset); 322 if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0) 323 break; 324 } while (--timeout); 325 if (timeout == 0) { 326 dev_err(sdev->dev, "timeout waiting for stream to exit reset\n"); 327 return -ETIMEDOUT; 328 } 329 330 return 0; 331 } 332 333 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev, 334 struct hdac_ext_stream *hext_stream, int cmd) 335 { 336 struct hdac_stream *hstream = &hext_stream->hstream; 337 int sd_offset = SOF_STREAM_SD_OFFSET(hstream); 338 u32 dma_start = SOF_HDA_SD_CTL_DMA_START; 339 int ret = 0; 340 u32 run; 341 342 /* cmd must be for audio stream */ 343 switch (cmd) { 344 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE: 345 if (!sdev->dspless_mode_selected) 346 break; 347 fallthrough; 348 case SNDRV_PCM_TRIGGER_START: 349 if (hstream->running) 350 break; 351 352 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL, 353 1 << hstream->index, 354 1 << hstream->index); 355 356 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 357 sd_offset, 358 SOF_HDA_SD_CTL_DMA_START | 359 SOF_HDA_CL_DMA_SD_INT_MASK, 360 SOF_HDA_SD_CTL_DMA_START | 361 SOF_HDA_CL_DMA_SD_INT_MASK); 362 363 ret = snd_sof_dsp_read_poll_timeout(sdev, 364 HDA_DSP_HDA_BAR, 365 sd_offset, run, 366 ((run & dma_start) == dma_start), 367 HDA_DSP_REG_POLL_INTERVAL_US, 368 HDA_DSP_STREAM_RUN_TIMEOUT); 369 370 if (ret >= 0) 371 hstream->running = true; 372 373 break; 374 case SNDRV_PCM_TRIGGER_PAUSE_PUSH: 375 if (!sdev->dspless_mode_selected) 376 break; 377 fallthrough; 378 case SNDRV_PCM_TRIGGER_SUSPEND: 379 case SNDRV_PCM_TRIGGER_STOP: 380 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 381 sd_offset, 382 SOF_HDA_SD_CTL_DMA_START | 383 SOF_HDA_CL_DMA_SD_INT_MASK, 0x0); 384 385 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR, 386 sd_offset, run, 387 !(run & dma_start), 388 HDA_DSP_REG_POLL_INTERVAL_US, 389 HDA_DSP_STREAM_RUN_TIMEOUT); 390 391 if (ret >= 0) { 392 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 393 sd_offset + SOF_HDA_ADSP_REG_SD_STS, 394 SOF_HDA_CL_DMA_SD_INT_MASK); 395 396 hstream->running = false; 397 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 398 SOF_HDA_INTCTL, 399 1 << hstream->index, 0x0); 400 } 401 break; 402 default: 403 dev_err(sdev->dev, "error: unknown command: %d\n", cmd); 404 return -EINVAL; 405 } 406 407 if (ret < 0) { 408 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream); 409 410 dev_err(sdev->dev, 411 "%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n", 412 __func__, cmd, stream_name ? stream_name : "unknown stream"); 413 kfree(stream_name); 414 } 415 416 return ret; 417 } 418 419 /* minimal recommended programming for ICCMAX stream */ 420 int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream, 421 struct snd_dma_buffer *dmab, 422 struct snd_pcm_hw_params *params) 423 { 424 struct hdac_stream *hstream = &hext_stream->hstream; 425 int sd_offset = SOF_STREAM_SD_OFFSET(hstream); 426 int ret; 427 u32 mask = 0x1 << hstream->index; 428 429 if (!hext_stream) { 430 dev_err(sdev->dev, "error: no stream available\n"); 431 return -ENODEV; 432 } 433 434 if (!dmab) { 435 dev_err(sdev->dev, "error: no dma buffer allocated!\n"); 436 return -ENODEV; 437 } 438 439 if (hstream->posbuf) 440 *hstream->posbuf = 0; 441 442 /* reset BDL address */ 443 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 444 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 445 0x0); 446 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 447 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 448 0x0); 449 450 hstream->frags = 0; 451 452 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream); 453 if (ret < 0) { 454 dev_err(sdev->dev, "error: set up of BDL failed\n"); 455 return ret; 456 } 457 458 /* program BDL address */ 459 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 460 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 461 (u32)hstream->bdl.addr); 462 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 463 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 464 upper_32_bits(hstream->bdl.addr)); 465 466 /* program cyclic buffer length */ 467 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 468 sd_offset + SOF_HDA_ADSP_REG_SD_CBL, 469 hstream->bufsize); 470 471 /* program last valid index */ 472 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 473 sd_offset + SOF_HDA_ADSP_REG_SD_LVI, 474 0xffff, (hstream->frags - 1)); 475 476 /* decouple host and link DMA, enable DSP features */ 477 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 478 mask, mask); 479 480 /* Follow HW recommendation to set the guardband value to 95us during FW boot */ 481 snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP, 482 HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US); 483 484 /* start DMA */ 485 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 486 SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START); 487 488 return 0; 489 } 490 491 /* 492 * prepare for common hdac registers settings, for both code loader 493 * and normal stream. 494 */ 495 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev, 496 struct hdac_ext_stream *hext_stream, 497 struct snd_dma_buffer *dmab, 498 struct snd_pcm_hw_params *params) 499 { 500 const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata); 501 struct hdac_bus *bus = sof_to_bus(sdev); 502 struct hdac_stream *hstream; 503 int sd_offset, ret; 504 u32 dma_start = SOF_HDA_SD_CTL_DMA_START; 505 u32 mask; 506 u32 run; 507 508 if (!hext_stream) { 509 dev_err(sdev->dev, "error: no stream available\n"); 510 return -ENODEV; 511 } 512 513 if (!dmab) { 514 dev_err(sdev->dev, "error: no dma buffer allocated!\n"); 515 return -ENODEV; 516 } 517 518 hstream = &hext_stream->hstream; 519 sd_offset = SOF_STREAM_SD_OFFSET(hstream); 520 mask = BIT(hstream->index); 521 522 /* decouple host and link DMA if the DSP is used */ 523 if (!sdev->dspless_mode_selected) 524 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 525 mask, mask); 526 527 /* clear stream status */ 528 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 529 SOF_HDA_CL_DMA_SD_INT_MASK | 530 SOF_HDA_SD_CTL_DMA_START, 0); 531 532 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR, 533 sd_offset, run, 534 !(run & dma_start), 535 HDA_DSP_REG_POLL_INTERVAL_US, 536 HDA_DSP_STREAM_RUN_TIMEOUT); 537 538 if (ret < 0) { 539 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream); 540 541 dev_err(sdev->dev, 542 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n", 543 __func__, stream_name ? stream_name : "unknown stream"); 544 kfree(stream_name); 545 return ret; 546 } 547 548 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 549 sd_offset + SOF_HDA_ADSP_REG_SD_STS, 550 SOF_HDA_CL_DMA_SD_INT_MASK, 551 SOF_HDA_CL_DMA_SD_INT_MASK); 552 553 /* stream reset */ 554 ret = hda_dsp_stream_reset(sdev, hstream); 555 if (ret < 0) 556 return ret; 557 558 if (hstream->posbuf) 559 *hstream->posbuf = 0; 560 561 /* reset BDL address */ 562 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 563 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 564 0x0); 565 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 566 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 567 0x0); 568 569 /* clear stream status */ 570 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 571 SOF_HDA_CL_DMA_SD_INT_MASK | 572 SOF_HDA_SD_CTL_DMA_START, 0); 573 574 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR, 575 sd_offset, run, 576 !(run & dma_start), 577 HDA_DSP_REG_POLL_INTERVAL_US, 578 HDA_DSP_STREAM_RUN_TIMEOUT); 579 580 if (ret < 0) { 581 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream); 582 583 dev_err(sdev->dev, 584 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n", 585 __func__, stream_name ? stream_name : "unknown stream"); 586 kfree(stream_name); 587 return ret; 588 } 589 590 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 591 sd_offset + SOF_HDA_ADSP_REG_SD_STS, 592 SOF_HDA_CL_DMA_SD_INT_MASK, 593 SOF_HDA_CL_DMA_SD_INT_MASK); 594 595 hstream->frags = 0; 596 597 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream); 598 if (ret < 0) { 599 dev_err(sdev->dev, "error: set up of BDL failed\n"); 600 return ret; 601 } 602 603 /* program stream tag to set up stream descriptor for DMA */ 604 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 605 SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK, 606 hstream->stream_tag << 607 SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT); 608 609 /* program cyclic buffer length */ 610 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 611 sd_offset + SOF_HDA_ADSP_REG_SD_CBL, 612 hstream->bufsize); 613 614 /* 615 * Recommended hardware programming sequence for HDAudio DMA format 616 * on earlier platforms - this is not needed on newer platforms 617 * 618 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit 619 * for corresponding stream index before the time of writing 620 * format to SDxFMT register. 621 * 2. Write SDxFMT 622 * 3. Set PPCTL.PROCEN bit for corresponding stream index to 623 * enable decoupled mode 624 */ 625 626 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK)) 627 /* couple host and link DMA, disable DSP features */ 628 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 629 mask, 0); 630 631 /* program stream format */ 632 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 633 sd_offset + 634 SOF_HDA_ADSP_REG_SD_FORMAT, 635 0xffff, hstream->format_val); 636 637 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK)) 638 /* decouple host and link DMA, enable DSP features */ 639 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 640 mask, mask); 641 642 /* program last valid index */ 643 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 644 sd_offset + SOF_HDA_ADSP_REG_SD_LVI, 645 0xffff, (hstream->frags - 1)); 646 647 /* program BDL address */ 648 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 649 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 650 (u32)hstream->bdl.addr); 651 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 652 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 653 upper_32_bits(hstream->bdl.addr)); 654 655 /* enable position buffer, if needed */ 656 if (bus->use_posbuf && bus->posbuf.addr && 657 !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE) 658 & SOF_HDA_ADSP_DPLBASE_ENABLE)) { 659 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE, 660 upper_32_bits(bus->posbuf.addr)); 661 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE, 662 (u32)bus->posbuf.addr | 663 SOF_HDA_ADSP_DPLBASE_ENABLE); 664 } 665 666 /* set interrupt enable bits */ 667 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 668 SOF_HDA_CL_DMA_SD_INT_MASK, 669 SOF_HDA_CL_DMA_SD_INT_MASK); 670 671 /* read FIFO size */ 672 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) { 673 hstream->fifo_size = 674 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 675 sd_offset + 676 SOF_HDA_ADSP_REG_SD_FIFOSIZE); 677 hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK; 678 hstream->fifo_size += 1; 679 } else { 680 hstream->fifo_size = 0; 681 } 682 683 return ret; 684 } 685 686 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev, 687 struct snd_pcm_substream *substream) 688 { 689 struct hdac_stream *hstream = substream->runtime->private_data; 690 struct hdac_ext_stream *hext_stream = container_of(hstream, 691 struct hdac_ext_stream, 692 hstream); 693 int ret; 694 695 ret = hda_dsp_stream_reset(sdev, hstream); 696 if (ret < 0) 697 return ret; 698 699 if (!sdev->dspless_mode_selected) { 700 struct hdac_bus *bus = sof_to_bus(sdev); 701 u32 mask = BIT(hstream->index); 702 703 spin_lock_irq(&bus->reg_lock); 704 /* couple host and link DMA if link DMA channel is idle */ 705 if (!hext_stream->link_locked) 706 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, 707 SOF_HDA_REG_PP_PPCTL, mask, 0); 708 spin_unlock_irq(&bus->reg_lock); 709 } 710 711 hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0); 712 713 hstream->substream = NULL; 714 715 return 0; 716 } 717 EXPORT_SYMBOL_NS(hda_dsp_stream_hw_free, SND_SOC_SOF_INTEL_HDA_COMMON); 718 719 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev) 720 { 721 struct hdac_bus *bus = sof_to_bus(sdev); 722 bool ret = false; 723 u32 status; 724 725 /* The function can be called at irq thread, so use spin_lock_irq */ 726 spin_lock_irq(&bus->reg_lock); 727 728 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS); 729 730 trace_sof_intel_hda_dsp_check_stream_irq(sdev, status); 731 732 /* if Register inaccessible, ignore it.*/ 733 if (status != 0xffffffff) 734 ret = true; 735 736 spin_unlock_irq(&bus->reg_lock); 737 738 return ret; 739 } 740 EXPORT_SYMBOL_NS(hda_dsp_check_stream_irq, SND_SOC_SOF_INTEL_HDA_COMMON); 741 742 static void 743 hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction) 744 { 745 u64 buffer_size = hstream->bufsize; 746 u64 prev_pos, pos, num_bytes; 747 748 div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos); 749 pos = hda_dsp_stream_get_position(hstream, direction, false); 750 751 if (pos < prev_pos) 752 num_bytes = (buffer_size - prev_pos) + pos; 753 else 754 num_bytes = pos - prev_pos; 755 756 hstream->curr_pos += num_bytes; 757 } 758 759 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status) 760 { 761 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus); 762 struct hdac_stream *s; 763 bool active = false; 764 u32 sd_status; 765 766 list_for_each_entry(s, &bus->stream_list, list) { 767 if (status & BIT(s->index) && s->opened) { 768 sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS); 769 770 trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status); 771 772 writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS); 773 774 active = true; 775 if (!s->running) 776 continue; 777 if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0) 778 continue; 779 if (!s->substream && !s->cstream) { 780 /* 781 * when no substream is found, the DMA may used for code loading 782 * or data transfers which can rely on wait_for_completion() 783 */ 784 struct sof_intel_hda_stream *hda_stream; 785 struct hdac_ext_stream *hext_stream; 786 787 hext_stream = stream_to_hdac_ext_stream(s); 788 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, 789 hext_stream); 790 791 complete(&hda_stream->ioc); 792 continue; 793 } 794 795 /* Inform ALSA only if the IPC position is not used */ 796 if (s->substream && sof_hda->no_ipc_position) { 797 snd_sof_pcm_period_elapsed(s->substream); 798 } else if (s->cstream) { 799 hda_dsp_compr_bytes_transferred(s, s->cstream->direction); 800 snd_compr_fragment_elapsed(s->cstream); 801 } 802 } 803 } 804 805 return active; 806 } 807 808 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context) 809 { 810 struct snd_sof_dev *sdev = context; 811 struct hdac_bus *bus = sof_to_bus(sdev); 812 bool active; 813 u32 status; 814 int i; 815 816 /* 817 * Loop 10 times to handle missed interrupts caused by 818 * unsolicited responses from the codec 819 */ 820 for (i = 0, active = true; i < 10 && active; i++) { 821 spin_lock_irq(&bus->reg_lock); 822 823 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS); 824 825 /* check streams */ 826 active = hda_dsp_stream_check(bus, status); 827 828 /* check and clear RIRB interrupt */ 829 if (status & AZX_INT_CTRL_EN) { 830 active |= hda_codec_check_rirb_status(sdev); 831 } 832 spin_unlock_irq(&bus->reg_lock); 833 } 834 835 return IRQ_HANDLED; 836 } 837 EXPORT_SYMBOL_NS(hda_dsp_stream_threaded_handler, SND_SOC_SOF_INTEL_HDA_COMMON); 838 839 int hda_dsp_stream_init(struct snd_sof_dev *sdev) 840 { 841 struct hdac_bus *bus = sof_to_bus(sdev); 842 struct hdac_ext_stream *hext_stream; 843 struct hdac_stream *hstream; 844 struct pci_dev *pci = to_pci_dev(sdev->dev); 845 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus); 846 int sd_offset; 847 int i, num_playback, num_capture, num_total, ret; 848 u32 gcap; 849 850 gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP); 851 dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap); 852 853 /* get stream count from GCAP */ 854 num_capture = (gcap >> 8) & 0x0f; 855 num_playback = (gcap >> 12) & 0x0f; 856 num_total = num_playback + num_capture; 857 858 dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n", 859 num_playback, num_capture); 860 861 if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) { 862 dev_err(sdev->dev, "error: too many playback streams %d\n", 863 num_playback); 864 return -EINVAL; 865 } 866 867 if (num_capture >= SOF_HDA_CAPTURE_STREAMS) { 868 dev_err(sdev->dev, "error: too many capture streams %d\n", 869 num_playback); 870 return -EINVAL; 871 } 872 873 /* 874 * mem alloc for the position buffer 875 * TODO: check position buffer update 876 */ 877 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev, 878 SOF_HDA_DPIB_ENTRY_SIZE * num_total, 879 &bus->posbuf); 880 if (ret < 0) { 881 dev_err(sdev->dev, "error: posbuffer dma alloc failed\n"); 882 return -ENOMEM; 883 } 884 885 /* 886 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for 887 * HDAudio codecs 888 */ 889 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev, 890 PAGE_SIZE, &bus->rb); 891 if (ret < 0) { 892 dev_err(sdev->dev, "error: RB alloc failed\n"); 893 return -ENOMEM; 894 } 895 896 /* create capture and playback streams */ 897 for (i = 0; i < num_total; i++) { 898 struct sof_intel_hda_stream *hda_stream; 899 900 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream), 901 GFP_KERNEL); 902 if (!hda_stream) 903 return -ENOMEM; 904 905 hda_stream->sdev = sdev; 906 init_completion(&hda_stream->ioc); 907 908 hext_stream = &hda_stream->hext_stream; 909 910 if (sdev->bar[HDA_DSP_PP_BAR]) { 911 hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] + 912 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i; 913 914 hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] + 915 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total + 916 SOF_HDA_PPLC_INTERVAL * i; 917 } 918 919 hstream = &hext_stream->hstream; 920 921 /* do we support SPIB */ 922 if (sdev->bar[HDA_DSP_SPIB_BAR]) { 923 hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] + 924 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i + 925 SOF_HDA_SPIB_SPIB; 926 927 hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] + 928 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i + 929 SOF_HDA_SPIB_MAXFIFO; 930 } 931 932 hstream->bus = bus; 933 hstream->sd_int_sta_mask = 1 << i; 934 hstream->index = i; 935 sd_offset = SOF_STREAM_SD_OFFSET(hstream); 936 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset; 937 hstream->opened = false; 938 hstream->running = false; 939 940 if (i < num_capture) { 941 hstream->stream_tag = i + 1; 942 hstream->direction = SNDRV_PCM_STREAM_CAPTURE; 943 } else { 944 hstream->stream_tag = i - num_capture + 1; 945 hstream->direction = SNDRV_PCM_STREAM_PLAYBACK; 946 } 947 948 /* mem alloc for stream BDL */ 949 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev, 950 HDA_DSP_BDL_SIZE, &hstream->bdl); 951 if (ret < 0) { 952 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n"); 953 return -ENOMEM; 954 } 955 956 hstream->posbuf = (__le32 *)(bus->posbuf.area + 957 (hstream->index) * 8); 958 959 list_add_tail(&hstream->list, &bus->stream_list); 960 } 961 962 /* store total stream count (playback + capture) from GCAP */ 963 sof_hda->stream_max = num_total; 964 965 /* store stream count from GCAP required for CHAIN_DMA */ 966 if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) { 967 struct sof_ipc4_fw_data *ipc4_data = sdev->private; 968 969 ipc4_data->num_playback_streams = num_playback; 970 ipc4_data->num_capture_streams = num_capture; 971 } 972 973 return 0; 974 } 975 EXPORT_SYMBOL_NS(hda_dsp_stream_init, SND_SOC_SOF_INTEL_HDA_COMMON); 976 977 void hda_dsp_stream_free(struct snd_sof_dev *sdev) 978 { 979 struct hdac_bus *bus = sof_to_bus(sdev); 980 struct hdac_stream *s, *_s; 981 struct hdac_ext_stream *hext_stream; 982 struct sof_intel_hda_stream *hda_stream; 983 984 /* free position buffer */ 985 if (bus->posbuf.area) 986 snd_dma_free_pages(&bus->posbuf); 987 988 /* free CORB/RIRB buffer - only used for HDaudio codecs */ 989 if (bus->rb.area) 990 snd_dma_free_pages(&bus->rb); 991 992 list_for_each_entry_safe(s, _s, &bus->stream_list, list) { 993 /* TODO: decouple */ 994 995 /* free bdl buffer */ 996 if (s->bdl.area) 997 snd_dma_free_pages(&s->bdl); 998 list_del(&s->list); 999 hext_stream = stream_to_hdac_ext_stream(s); 1000 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, 1001 hext_stream); 1002 devm_kfree(sdev->dev, hda_stream); 1003 } 1004 } 1005 EXPORT_SYMBOL_NS(hda_dsp_stream_free, SND_SOC_SOF_INTEL_HDA_COMMON); 1006 1007 snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream, 1008 int direction, bool can_sleep) 1009 { 1010 struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream); 1011 struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream); 1012 struct snd_sof_dev *sdev = hda_stream->sdev; 1013 snd_pcm_uframes_t pos; 1014 1015 switch (sof_hda_position_quirk) { 1016 case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY: 1017 /* 1018 * This legacy code, inherited from the Skylake driver, 1019 * mixes DPIB registers and DPIB DDR updates and 1020 * does not seem to follow any known hardware recommendations. 1021 * It's not clear e.g. why there is a different flow 1022 * for capture and playback, the only information that matters is 1023 * what traffic class is used, and on all SOF-enabled platforms 1024 * only VC0 is supported so the work-around was likely not necessary 1025 * and quite possibly wrong. 1026 */ 1027 1028 /* DPIB/posbuf position mode: 1029 * For Playback, Use DPIB register from HDA space which 1030 * reflects the actual data transferred. 1031 * For Capture, Use the position buffer for pointer, as DPIB 1032 * is not accurate enough, its update may be completed 1033 * earlier than the data written to DDR. 1034 */ 1035 if (direction == SNDRV_PCM_STREAM_PLAYBACK) { 1036 pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 1037 AZX_REG_VS_SDXDPIB_XBASE + 1038 (AZX_REG_VS_SDXDPIB_XINTERVAL * 1039 hstream->index)); 1040 } else { 1041 /* 1042 * For capture stream, we need more workaround to fix the 1043 * position incorrect issue: 1044 * 1045 * 1. Wait at least 20us before reading position buffer after 1046 * the interrupt generated(IOC), to make sure position update 1047 * happens on frame boundary i.e. 20.833uSec for 48KHz. 1048 * 2. Perform a dummy Read to DPIB register to flush DMA 1049 * position value. 1050 * 3. Read the DMA Position from posbuf. Now the readback 1051 * value should be >= period boundary. 1052 */ 1053 if (can_sleep) 1054 usleep_range(20, 21); 1055 1056 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 1057 AZX_REG_VS_SDXDPIB_XBASE + 1058 (AZX_REG_VS_SDXDPIB_XINTERVAL * 1059 hstream->index)); 1060 pos = snd_hdac_stream_get_pos_posbuf(hstream); 1061 } 1062 break; 1063 case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS: 1064 /* 1065 * In case VC1 traffic is disabled this is the recommended option 1066 */ 1067 pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 1068 AZX_REG_VS_SDXDPIB_XBASE + 1069 (AZX_REG_VS_SDXDPIB_XINTERVAL * 1070 hstream->index)); 1071 break; 1072 case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE: 1073 /* 1074 * This is the recommended option when VC1 is enabled. 1075 * While this isn't needed for SOF platforms it's added for 1076 * consistency and debug. 1077 */ 1078 pos = snd_hdac_stream_get_pos_posbuf(hstream); 1079 break; 1080 default: 1081 dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n", 1082 sof_hda_position_quirk); 1083 pos = 0; 1084 break; 1085 } 1086 1087 if (pos >= hstream->bufsize) 1088 pos = 0; 1089 1090 return pos; 1091 } 1092 EXPORT_SYMBOL_NS(hda_dsp_stream_get_position, SND_SOC_SOF_INTEL_HDA_COMMON); 1093 1094 #define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l)) 1095 1096 /** 1097 * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream 1098 * @sdev: SOF device 1099 * @component: ASoC component 1100 * @substream: PCM substream 1101 * 1102 * Returns the raw Linear Link Position value 1103 */ 1104 u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev, 1105 struct snd_soc_component *component, 1106 struct snd_pcm_substream *substream) 1107 { 1108 struct hdac_stream *hstream = substream->runtime->private_data; 1109 struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream); 1110 u32 llp_l, llp_u; 1111 1112 /* 1113 * The pplc_addr have been calculated during probe in 1114 * hda_dsp_stream_init(): 1115 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] + 1116 * SOF_HDA_PPLC_BASE + 1117 * SOF_HDA_PPLC_MULTI * total_stream + 1118 * SOF_HDA_PPLC_INTERVAL * stream_index 1119 * 1120 * Use this pre-calculated address to avoid repeated re-calculation. 1121 */ 1122 llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL); 1123 llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU); 1124 1125 /* Compensate the LLP counter with the saved offset */ 1126 if (hext_stream->pplcllpl || hext_stream->pplcllpu) 1127 return merge_u64(llp_u, llp_l) - 1128 merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl); 1129 1130 return merge_u64(llp_u, llp_l); 1131 } 1132 EXPORT_SYMBOL_NS(hda_dsp_get_stream_llp, SND_SOC_SOF_INTEL_HDA_COMMON); 1133 1134 /** 1135 * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream 1136 * @sdev: SOF device 1137 * @component: ASoC component 1138 * @substream: PCM substream 1139 * 1140 * Returns the raw Linear Link Position value 1141 */ 1142 u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev, 1143 struct snd_soc_component *component, 1144 struct snd_pcm_substream *substream) 1145 { 1146 struct hdac_stream *hstream = substream->runtime->private_data; 1147 struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream); 1148 u32 ldp_l, ldp_u; 1149 1150 /* 1151 * The pphc_addr have been calculated during probe in 1152 * hda_dsp_stream_init(): 1153 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] + 1154 * SOF_HDA_PPHC_BASE + 1155 * SOF_HDA_PPHC_INTERVAL * stream_index 1156 * 1157 * Use this pre-calculated address to avoid repeated re-calculation. 1158 */ 1159 ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL); 1160 ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU); 1161 1162 return ((u64)ldp_u << 32) | ldp_l; 1163 } 1164 EXPORT_SYMBOL_NS(hda_dsp_get_stream_ldp, SND_SOC_SOF_INTEL_HDA_COMMON); 1165