1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause) 2 // 3 // This file is provided under a dual BSD/GPLv2 license. When using or 4 // redistributing this file, you may do so under either license. 5 // 6 // Copyright(c) 2018 Intel Corporation 7 // 8 // Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com> 9 // Ranjani Sridharan <ranjani.sridharan@linux.intel.com> 10 // Rander Wang <rander.wang@intel.com> 11 // Keyon Jie <yang.jie@linux.intel.com> 12 // 13 14 /* 15 * Hardware interface for generic Intel audio DSP HDA IP 16 */ 17 18 #include <sound/hdaudio_ext.h> 19 #include <sound/hda_register.h> 20 #include <sound/sof.h> 21 #include <trace/events/sof_intel.h> 22 #include "../ops.h" 23 #include "../sof-audio.h" 24 #include "../ipc4-priv.h" 25 #include "hda.h" 26 27 #define HDA_LTRP_GB_VALUE_US 95 28 29 static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream) 30 { 31 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) 32 return "Playback"; 33 else 34 return "Capture"; 35 } 36 37 static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream) 38 { 39 struct snd_soc_pcm_runtime *rtd; 40 41 if (hstream->substream) 42 rtd = snd_soc_substream_to_rtd(hstream->substream); 43 else if (hstream->cstream) 44 rtd = hstream->cstream->private_data; 45 else 46 /* Non audio DMA user, like dma-trace */ 47 return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)", 48 hda_hstream_direction_str(hstream), 49 hstream->stream_tag); 50 51 return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)", 52 rtd->dai_link->name, hda_hstream_direction_str(hstream), 53 hstream->stream_tag); 54 } 55 56 /* 57 * set up one of BDL entries for a stream 58 */ 59 static int hda_setup_bdle(struct snd_sof_dev *sdev, 60 struct snd_dma_buffer *dmab, 61 struct hdac_stream *hstream, 62 struct sof_intel_dsp_bdl **bdlp, 63 int offset, int size, int ioc) 64 { 65 struct hdac_bus *bus = sof_to_bus(sdev); 66 struct sof_intel_dsp_bdl *bdl = *bdlp; 67 68 while (size > 0) { 69 dma_addr_t addr; 70 int chunk; 71 72 if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) { 73 dev_err(sdev->dev, "error: stream frags exceeded\n"); 74 return -EINVAL; 75 } 76 77 addr = snd_sgbuf_get_addr(dmab, offset); 78 /* program BDL addr */ 79 bdl->addr_l = cpu_to_le32(lower_32_bits(addr)); 80 bdl->addr_h = cpu_to_le32(upper_32_bits(addr)); 81 /* program BDL size */ 82 chunk = snd_sgbuf_get_chunk_size(dmab, offset, size); 83 /* one BDLE should not cross 4K boundary */ 84 if (bus->align_bdle_4k) { 85 u32 remain = 0x1000 - (offset & 0xfff); 86 87 if (chunk > remain) 88 chunk = remain; 89 } 90 bdl->size = cpu_to_le32(chunk); 91 /* only program IOC when the whole segment is processed */ 92 size -= chunk; 93 bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01); 94 bdl++; 95 hstream->frags++; 96 offset += chunk; 97 } 98 99 *bdlp = bdl; 100 return offset; 101 } 102 103 /* 104 * set up Buffer Descriptor List (BDL) for host memory transfer 105 * BDL describes the location of the individual buffers and is little endian. 106 */ 107 int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev, 108 struct snd_dma_buffer *dmab, 109 struct hdac_stream *hstream) 110 { 111 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 112 struct sof_intel_dsp_bdl *bdl; 113 int i, offset, period_bytes, periods; 114 int remain, ioc; 115 116 period_bytes = hstream->period_bytes; 117 dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes); 118 if (!period_bytes) 119 period_bytes = hstream->bufsize; 120 121 periods = hstream->bufsize / period_bytes; 122 123 dev_dbg(sdev->dev, "periods:%d\n", periods); 124 125 remain = hstream->bufsize % period_bytes; 126 if (remain) 127 periods++; 128 129 /* program the initial BDL entries */ 130 bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area; 131 offset = 0; 132 hstream->frags = 0; 133 134 /* 135 * set IOC if don't use position IPC 136 * and period_wakeup needed. 137 */ 138 ioc = hda->no_ipc_position ? 139 !hstream->no_period_wakeup : 0; 140 141 for (i = 0; i < periods; i++) { 142 if (i == (periods - 1) && remain) 143 /* set the last small entry */ 144 offset = hda_setup_bdle(sdev, dmab, 145 hstream, &bdl, offset, 146 remain, 0); 147 else 148 offset = hda_setup_bdle(sdev, dmab, 149 hstream, &bdl, offset, 150 period_bytes, ioc); 151 } 152 153 return offset; 154 } 155 156 int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev, 157 struct hdac_ext_stream *hext_stream, 158 int enable, u32 size) 159 { 160 struct hdac_stream *hstream = &hext_stream->hstream; 161 u32 mask; 162 163 if (!sdev->bar[HDA_DSP_SPIB_BAR]) { 164 dev_err(sdev->dev, "error: address of spib capability is NULL\n"); 165 return -EINVAL; 166 } 167 168 mask = (1 << hstream->index); 169 170 /* enable/disable SPIB for the stream */ 171 snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR, 172 SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask, 173 enable << hstream->index); 174 175 /* set the SPIB value */ 176 sof_io_write(sdev, hstream->spib_addr, size); 177 178 return 0; 179 } 180 181 /* get next unused stream */ 182 struct hdac_ext_stream * 183 hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags) 184 { 185 const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata); 186 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 187 struct hdac_bus *bus = sof_to_bus(sdev); 188 struct sof_intel_hda_stream *hda_stream; 189 struct hdac_ext_stream *hext_stream = NULL; 190 struct hdac_stream *s; 191 192 spin_lock_irq(&bus->reg_lock); 193 194 /* get an unused stream */ 195 list_for_each_entry(s, &bus->stream_list, list) { 196 if (s->direction == direction && !s->opened) { 197 hext_stream = stream_to_hdac_ext_stream(s); 198 hda_stream = container_of(hext_stream, 199 struct sof_intel_hda_stream, 200 hext_stream); 201 /* check if the host DMA channel is reserved */ 202 if (hda_stream->host_reserved) 203 continue; 204 205 s->opened = true; 206 break; 207 } 208 } 209 210 spin_unlock_irq(&bus->reg_lock); 211 212 /* stream found ? */ 213 if (!hext_stream) { 214 dev_err(sdev->dev, "error: no free %s streams\n", 215 direction == SNDRV_PCM_STREAM_PLAYBACK ? 216 "playback" : "capture"); 217 return hext_stream; 218 } 219 220 hda_stream->flags = flags; 221 222 /* 223 * Prevent DMI Link L1 entry for streams that don't support it. 224 * Workaround to address a known issue with host DMA that results 225 * in xruns during pause/release in capture scenarios. This is not needed for the ACE IP. 226 */ 227 if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && 228 !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) { 229 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 230 HDA_VS_INTEL_EM2, 231 HDA_VS_INTEL_EM2_L1SEN, 0); 232 hda->l1_disabled = true; 233 } 234 235 return hext_stream; 236 } 237 238 /* free a stream */ 239 int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag) 240 { 241 const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata); 242 struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata; 243 struct hdac_bus *bus = sof_to_bus(sdev); 244 struct sof_intel_hda_stream *hda_stream; 245 struct hdac_ext_stream *hext_stream; 246 struct hdac_stream *s; 247 bool dmi_l1_enable = true; 248 bool found = false; 249 250 spin_lock_irq(&bus->reg_lock); 251 252 /* 253 * close stream matching the stream tag and check if there are any open streams 254 * that are DMI L1 incompatible. 255 */ 256 list_for_each_entry(s, &bus->stream_list, list) { 257 hext_stream = stream_to_hdac_ext_stream(s); 258 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream); 259 260 if (!s->opened) 261 continue; 262 263 if (s->direction == direction && s->stream_tag == stream_tag) { 264 s->opened = false; 265 found = true; 266 } else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) { 267 dmi_l1_enable = false; 268 } 269 } 270 271 spin_unlock_irq(&bus->reg_lock); 272 273 /* Enable DMI L1 if permitted */ 274 if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) { 275 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2, 276 HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN); 277 hda->l1_disabled = false; 278 } 279 280 if (!found) { 281 dev_err(sdev->dev, "%s: stream_tag %d not opened!\n", 282 __func__, stream_tag); 283 return -ENODEV; 284 } 285 286 return 0; 287 } 288 289 static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream) 290 { 291 int sd_offset = SOF_STREAM_SD_OFFSET(hstream); 292 int timeout = HDA_DSP_STREAM_RESET_TIMEOUT; 293 u32 val; 294 295 /* enter stream reset */ 296 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 297 SOF_STREAM_SD_OFFSET_CRST); 298 do { 299 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset); 300 if (val & SOF_STREAM_SD_OFFSET_CRST) 301 break; 302 } while (--timeout); 303 if (timeout == 0) { 304 dev_err(sdev->dev, "timeout waiting for stream reset\n"); 305 return -ETIMEDOUT; 306 } 307 308 timeout = HDA_DSP_STREAM_RESET_TIMEOUT; 309 310 /* exit stream reset and wait to read a zero before reading any other register */ 311 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0); 312 313 /* wait for hardware to report that stream is out of reset */ 314 udelay(3); 315 do { 316 val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset); 317 if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0) 318 break; 319 } while (--timeout); 320 if (timeout == 0) { 321 dev_err(sdev->dev, "timeout waiting for stream to exit reset\n"); 322 return -ETIMEDOUT; 323 } 324 325 return 0; 326 } 327 328 int hda_dsp_stream_trigger(struct snd_sof_dev *sdev, 329 struct hdac_ext_stream *hext_stream, int cmd) 330 { 331 struct hdac_stream *hstream = &hext_stream->hstream; 332 int sd_offset = SOF_STREAM_SD_OFFSET(hstream); 333 u32 dma_start = SOF_HDA_SD_CTL_DMA_START; 334 int ret = 0; 335 u32 run; 336 337 /* cmd must be for audio stream */ 338 switch (cmd) { 339 case SNDRV_PCM_TRIGGER_PAUSE_RELEASE: 340 if (!sdev->dspless_mode_selected) 341 break; 342 fallthrough; 343 case SNDRV_PCM_TRIGGER_START: 344 if (hstream->running) 345 break; 346 347 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL, 348 1 << hstream->index, 349 1 << hstream->index); 350 351 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 352 sd_offset, 353 SOF_HDA_SD_CTL_DMA_START | 354 SOF_HDA_CL_DMA_SD_INT_MASK, 355 SOF_HDA_SD_CTL_DMA_START | 356 SOF_HDA_CL_DMA_SD_INT_MASK); 357 358 ret = snd_sof_dsp_read_poll_timeout(sdev, 359 HDA_DSP_HDA_BAR, 360 sd_offset, run, 361 ((run & dma_start) == dma_start), 362 HDA_DSP_REG_POLL_INTERVAL_US, 363 HDA_DSP_STREAM_RUN_TIMEOUT); 364 365 if (ret >= 0) 366 hstream->running = true; 367 368 break; 369 case SNDRV_PCM_TRIGGER_PAUSE_PUSH: 370 if (!sdev->dspless_mode_selected) 371 break; 372 fallthrough; 373 case SNDRV_PCM_TRIGGER_SUSPEND: 374 case SNDRV_PCM_TRIGGER_STOP: 375 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 376 sd_offset, 377 SOF_HDA_SD_CTL_DMA_START | 378 SOF_HDA_CL_DMA_SD_INT_MASK, 0x0); 379 380 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR, 381 sd_offset, run, 382 !(run & dma_start), 383 HDA_DSP_REG_POLL_INTERVAL_US, 384 HDA_DSP_STREAM_RUN_TIMEOUT); 385 386 if (ret >= 0) { 387 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 388 sd_offset + SOF_HDA_ADSP_REG_SD_STS, 389 SOF_HDA_CL_DMA_SD_INT_MASK); 390 391 hstream->running = false; 392 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 393 SOF_HDA_INTCTL, 394 1 << hstream->index, 0x0); 395 } 396 break; 397 default: 398 dev_err(sdev->dev, "error: unknown command: %d\n", cmd); 399 return -EINVAL; 400 } 401 402 if (ret < 0) { 403 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream); 404 405 dev_err(sdev->dev, 406 "%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n", 407 __func__, cmd, stream_name ? stream_name : "unknown stream"); 408 kfree(stream_name); 409 } 410 411 return ret; 412 } 413 414 /* minimal recommended programming for ICCMAX stream */ 415 int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream, 416 struct snd_dma_buffer *dmab, 417 struct snd_pcm_hw_params *params) 418 { 419 struct hdac_stream *hstream = &hext_stream->hstream; 420 int sd_offset = SOF_STREAM_SD_OFFSET(hstream); 421 int ret; 422 u32 mask = 0x1 << hstream->index; 423 424 if (!hext_stream) { 425 dev_err(sdev->dev, "error: no stream available\n"); 426 return -ENODEV; 427 } 428 429 if (!dmab) { 430 dev_err(sdev->dev, "error: no dma buffer allocated!\n"); 431 return -ENODEV; 432 } 433 434 if (hstream->posbuf) 435 *hstream->posbuf = 0; 436 437 /* reset BDL address */ 438 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 439 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 440 0x0); 441 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 442 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 443 0x0); 444 445 hstream->frags = 0; 446 447 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream); 448 if (ret < 0) { 449 dev_err(sdev->dev, "error: set up of BDL failed\n"); 450 return ret; 451 } 452 453 /* program BDL address */ 454 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 455 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 456 (u32)hstream->bdl.addr); 457 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 458 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 459 upper_32_bits(hstream->bdl.addr)); 460 461 /* program cyclic buffer length */ 462 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 463 sd_offset + SOF_HDA_ADSP_REG_SD_CBL, 464 hstream->bufsize); 465 466 /* program last valid index */ 467 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 468 sd_offset + SOF_HDA_ADSP_REG_SD_LVI, 469 0xffff, (hstream->frags - 1)); 470 471 /* decouple host and link DMA, enable DSP features */ 472 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 473 mask, mask); 474 475 /* Follow HW recommendation to set the guardband value to 95us during FW boot */ 476 snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP, 477 HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US); 478 479 /* start DMA */ 480 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 481 SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START); 482 483 return 0; 484 } 485 486 /* 487 * prepare for common hdac registers settings, for both code loader 488 * and normal stream. 489 */ 490 int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev, 491 struct hdac_ext_stream *hext_stream, 492 struct snd_dma_buffer *dmab, 493 struct snd_pcm_hw_params *params) 494 { 495 const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata); 496 struct hdac_bus *bus = sof_to_bus(sdev); 497 struct hdac_stream *hstream; 498 int sd_offset, ret; 499 u32 dma_start = SOF_HDA_SD_CTL_DMA_START; 500 u32 mask; 501 u32 run; 502 503 if (!hext_stream) { 504 dev_err(sdev->dev, "error: no stream available\n"); 505 return -ENODEV; 506 } 507 508 if (!dmab) { 509 dev_err(sdev->dev, "error: no dma buffer allocated!\n"); 510 return -ENODEV; 511 } 512 513 hstream = &hext_stream->hstream; 514 sd_offset = SOF_STREAM_SD_OFFSET(hstream); 515 mask = BIT(hstream->index); 516 517 /* decouple host and link DMA if the DSP is used */ 518 if (!sdev->dspless_mode_selected) 519 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 520 mask, mask); 521 522 /* clear stream status */ 523 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 524 SOF_HDA_CL_DMA_SD_INT_MASK | 525 SOF_HDA_SD_CTL_DMA_START, 0); 526 527 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR, 528 sd_offset, run, 529 !(run & dma_start), 530 HDA_DSP_REG_POLL_INTERVAL_US, 531 HDA_DSP_STREAM_RUN_TIMEOUT); 532 533 if (ret < 0) { 534 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream); 535 536 dev_err(sdev->dev, 537 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n", 538 __func__, stream_name ? stream_name : "unknown stream"); 539 kfree(stream_name); 540 return ret; 541 } 542 543 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 544 sd_offset + SOF_HDA_ADSP_REG_SD_STS, 545 SOF_HDA_CL_DMA_SD_INT_MASK, 546 SOF_HDA_CL_DMA_SD_INT_MASK); 547 548 /* stream reset */ 549 ret = hda_dsp_stream_reset(sdev, hstream); 550 if (ret < 0) 551 return ret; 552 553 if (hstream->posbuf) 554 *hstream->posbuf = 0; 555 556 /* reset BDL address */ 557 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 558 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 559 0x0); 560 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 561 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 562 0x0); 563 564 /* clear stream status */ 565 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 566 SOF_HDA_CL_DMA_SD_INT_MASK | 567 SOF_HDA_SD_CTL_DMA_START, 0); 568 569 ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR, 570 sd_offset, run, 571 !(run & dma_start), 572 HDA_DSP_REG_POLL_INTERVAL_US, 573 HDA_DSP_STREAM_RUN_TIMEOUT); 574 575 if (ret < 0) { 576 char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream); 577 578 dev_err(sdev->dev, 579 "%s: on %s: timeout on STREAM_SD_OFFSET read1\n", 580 __func__, stream_name ? stream_name : "unknown stream"); 581 kfree(stream_name); 582 return ret; 583 } 584 585 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 586 sd_offset + SOF_HDA_ADSP_REG_SD_STS, 587 SOF_HDA_CL_DMA_SD_INT_MASK, 588 SOF_HDA_CL_DMA_SD_INT_MASK); 589 590 hstream->frags = 0; 591 592 ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream); 593 if (ret < 0) { 594 dev_err(sdev->dev, "error: set up of BDL failed\n"); 595 return ret; 596 } 597 598 /* program stream tag to set up stream descriptor for DMA */ 599 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 600 SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK, 601 hstream->stream_tag << 602 SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT); 603 604 /* program cyclic buffer length */ 605 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 606 sd_offset + SOF_HDA_ADSP_REG_SD_CBL, 607 hstream->bufsize); 608 609 /* 610 * Recommended hardware programming sequence for HDAudio DMA format 611 * on earlier platforms - this is not needed on newer platforms 612 * 613 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit 614 * for corresponding stream index before the time of writing 615 * format to SDxFMT register. 616 * 2. Write SDxFMT 617 * 3. Set PPCTL.PROCEN bit for corresponding stream index to 618 * enable decoupled mode 619 */ 620 621 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK)) 622 /* couple host and link DMA, disable DSP features */ 623 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 624 mask, 0); 625 626 /* program stream format */ 627 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 628 sd_offset + 629 SOF_HDA_ADSP_REG_SD_FORMAT, 630 0xffff, hstream->format_val); 631 632 if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK)) 633 /* decouple host and link DMA, enable DSP features */ 634 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL, 635 mask, mask); 636 637 /* program last valid index */ 638 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, 639 sd_offset + SOF_HDA_ADSP_REG_SD_LVI, 640 0xffff, (hstream->frags - 1)); 641 642 /* program BDL address */ 643 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 644 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL, 645 (u32)hstream->bdl.addr); 646 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, 647 sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU, 648 upper_32_bits(hstream->bdl.addr)); 649 650 /* enable position buffer, if needed */ 651 if (bus->use_posbuf && bus->posbuf.addr && 652 !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE) 653 & SOF_HDA_ADSP_DPLBASE_ENABLE)) { 654 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE, 655 upper_32_bits(bus->posbuf.addr)); 656 snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE, 657 (u32)bus->posbuf.addr | 658 SOF_HDA_ADSP_DPLBASE_ENABLE); 659 } 660 661 /* set interrupt enable bits */ 662 snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, 663 SOF_HDA_CL_DMA_SD_INT_MASK, 664 SOF_HDA_CL_DMA_SD_INT_MASK); 665 666 /* read FIFO size */ 667 if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) { 668 hstream->fifo_size = 669 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 670 sd_offset + 671 SOF_HDA_ADSP_REG_SD_FIFOSIZE); 672 hstream->fifo_size &= SOF_HDA_SD_FIFOSIZE_FIFOS_MASK; 673 hstream->fifo_size += 1; 674 } else { 675 hstream->fifo_size = 0; 676 } 677 678 return ret; 679 } 680 681 int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev, 682 struct snd_pcm_substream *substream) 683 { 684 struct hdac_stream *hstream = substream->runtime->private_data; 685 struct hdac_ext_stream *hext_stream = container_of(hstream, 686 struct hdac_ext_stream, 687 hstream); 688 int ret; 689 690 ret = hda_dsp_stream_reset(sdev, hstream); 691 if (ret < 0) 692 return ret; 693 694 if (!sdev->dspless_mode_selected) { 695 struct hdac_bus *bus = sof_to_bus(sdev); 696 u32 mask = BIT(hstream->index); 697 698 spin_lock_irq(&bus->reg_lock); 699 /* couple host and link DMA if link DMA channel is idle */ 700 if (!hext_stream->link_locked) 701 snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, 702 SOF_HDA_REG_PP_PPCTL, mask, 0); 703 spin_unlock_irq(&bus->reg_lock); 704 } 705 706 hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0); 707 708 hstream->substream = NULL; 709 710 return 0; 711 } 712 713 bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev) 714 { 715 struct hdac_bus *bus = sof_to_bus(sdev); 716 bool ret = false; 717 u32 status; 718 719 /* The function can be called at irq thread, so use spin_lock_irq */ 720 spin_lock_irq(&bus->reg_lock); 721 722 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS); 723 724 trace_sof_intel_hda_dsp_check_stream_irq(sdev, status); 725 726 /* if Register inaccessible, ignore it.*/ 727 if (status != 0xffffffff) 728 ret = true; 729 730 spin_unlock_irq(&bus->reg_lock); 731 732 return ret; 733 } 734 735 static void 736 hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction) 737 { 738 u64 buffer_size = hstream->bufsize; 739 u64 prev_pos, pos, num_bytes; 740 741 div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos); 742 pos = hda_dsp_stream_get_position(hstream, direction, false); 743 744 if (pos < prev_pos) 745 num_bytes = (buffer_size - prev_pos) + pos; 746 else 747 num_bytes = pos - prev_pos; 748 749 hstream->curr_pos += num_bytes; 750 } 751 752 static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status) 753 { 754 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus); 755 struct hdac_stream *s; 756 bool active = false; 757 u32 sd_status; 758 759 list_for_each_entry(s, &bus->stream_list, list) { 760 if (status & BIT(s->index) && s->opened) { 761 sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS); 762 763 trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status); 764 765 writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS); 766 767 active = true; 768 if (!s->running) 769 continue; 770 if ((sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0) 771 continue; 772 if (!s->substream && !s->cstream) { 773 /* 774 * when no substream is found, the DMA may used for code loading 775 * or data transfers which can rely on wait_for_completion() 776 */ 777 struct sof_intel_hda_stream *hda_stream; 778 struct hdac_ext_stream *hext_stream; 779 780 hext_stream = stream_to_hdac_ext_stream(s); 781 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, 782 hext_stream); 783 784 complete(&hda_stream->ioc); 785 continue; 786 } 787 788 /* Inform ALSA only if the IPC position is not used */ 789 if (s->substream && sof_hda->no_ipc_position) { 790 snd_sof_pcm_period_elapsed(s->substream); 791 } else if (s->cstream) { 792 hda_dsp_compr_bytes_transferred(s, s->cstream->direction); 793 snd_compr_fragment_elapsed(s->cstream); 794 } 795 } 796 } 797 798 return active; 799 } 800 801 irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context) 802 { 803 struct snd_sof_dev *sdev = context; 804 struct hdac_bus *bus = sof_to_bus(sdev); 805 bool active; 806 u32 status; 807 int i; 808 809 /* 810 * Loop 10 times to handle missed interrupts caused by 811 * unsolicited responses from the codec 812 */ 813 for (i = 0, active = true; i < 10 && active; i++) { 814 spin_lock_irq(&bus->reg_lock); 815 816 status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS); 817 818 /* check streams */ 819 active = hda_dsp_stream_check(bus, status); 820 821 /* check and clear RIRB interrupt */ 822 if (status & AZX_INT_CTRL_EN) { 823 active |= hda_codec_check_rirb_status(sdev); 824 } 825 spin_unlock_irq(&bus->reg_lock); 826 } 827 828 return IRQ_HANDLED; 829 } 830 831 int hda_dsp_stream_init(struct snd_sof_dev *sdev) 832 { 833 struct hdac_bus *bus = sof_to_bus(sdev); 834 struct hdac_ext_stream *hext_stream; 835 struct hdac_stream *hstream; 836 struct pci_dev *pci = to_pci_dev(sdev->dev); 837 struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus); 838 int sd_offset; 839 int i, num_playback, num_capture, num_total, ret; 840 u32 gcap; 841 842 gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP); 843 dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap); 844 845 /* get stream count from GCAP */ 846 num_capture = (gcap >> 8) & 0x0f; 847 num_playback = (gcap >> 12) & 0x0f; 848 num_total = num_playback + num_capture; 849 850 dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n", 851 num_playback, num_capture); 852 853 if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) { 854 dev_err(sdev->dev, "error: too many playback streams %d\n", 855 num_playback); 856 return -EINVAL; 857 } 858 859 if (num_capture >= SOF_HDA_CAPTURE_STREAMS) { 860 dev_err(sdev->dev, "error: too many capture streams %d\n", 861 num_playback); 862 return -EINVAL; 863 } 864 865 /* 866 * mem alloc for the position buffer 867 * TODO: check position buffer update 868 */ 869 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev, 870 SOF_HDA_DPIB_ENTRY_SIZE * num_total, 871 &bus->posbuf); 872 if (ret < 0) { 873 dev_err(sdev->dev, "error: posbuffer dma alloc failed\n"); 874 return -ENOMEM; 875 } 876 877 /* 878 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for 879 * HDAudio codecs 880 */ 881 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev, 882 PAGE_SIZE, &bus->rb); 883 if (ret < 0) { 884 dev_err(sdev->dev, "error: RB alloc failed\n"); 885 return -ENOMEM; 886 } 887 888 /* create capture and playback streams */ 889 for (i = 0; i < num_total; i++) { 890 struct sof_intel_hda_stream *hda_stream; 891 892 hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream), 893 GFP_KERNEL); 894 if (!hda_stream) 895 return -ENOMEM; 896 897 hda_stream->sdev = sdev; 898 init_completion(&hda_stream->ioc); 899 900 hext_stream = &hda_stream->hext_stream; 901 902 if (sdev->bar[HDA_DSP_PP_BAR]) { 903 hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] + 904 SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i; 905 906 hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] + 907 SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total + 908 SOF_HDA_PPLC_INTERVAL * i; 909 } 910 911 hstream = &hext_stream->hstream; 912 913 /* do we support SPIB */ 914 if (sdev->bar[HDA_DSP_SPIB_BAR]) { 915 hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] + 916 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i + 917 SOF_HDA_SPIB_SPIB; 918 919 hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] + 920 SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i + 921 SOF_HDA_SPIB_MAXFIFO; 922 } 923 924 hstream->bus = bus; 925 hstream->sd_int_sta_mask = 1 << i; 926 hstream->index = i; 927 sd_offset = SOF_STREAM_SD_OFFSET(hstream); 928 hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset; 929 hstream->opened = false; 930 hstream->running = false; 931 932 if (i < num_capture) { 933 hstream->stream_tag = i + 1; 934 hstream->direction = SNDRV_PCM_STREAM_CAPTURE; 935 } else { 936 hstream->stream_tag = i - num_capture + 1; 937 hstream->direction = SNDRV_PCM_STREAM_PLAYBACK; 938 } 939 940 /* mem alloc for stream BDL */ 941 ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev, 942 HDA_DSP_BDL_SIZE, &hstream->bdl); 943 if (ret < 0) { 944 dev_err(sdev->dev, "error: stream bdl dma alloc failed\n"); 945 return -ENOMEM; 946 } 947 948 hstream->posbuf = (__le32 *)(bus->posbuf.area + 949 (hstream->index) * 8); 950 951 list_add_tail(&hstream->list, &bus->stream_list); 952 } 953 954 /* store total stream count (playback + capture) from GCAP */ 955 sof_hda->stream_max = num_total; 956 957 /* store stream count from GCAP required for CHAIN_DMA */ 958 if (sdev->pdata->ipc_type == SOF_IPC_TYPE_4) { 959 struct sof_ipc4_fw_data *ipc4_data = sdev->private; 960 961 ipc4_data->num_playback_streams = num_playback; 962 ipc4_data->num_capture_streams = num_capture; 963 } 964 965 return 0; 966 } 967 968 void hda_dsp_stream_free(struct snd_sof_dev *sdev) 969 { 970 struct hdac_bus *bus = sof_to_bus(sdev); 971 struct hdac_stream *s, *_s; 972 struct hdac_ext_stream *hext_stream; 973 struct sof_intel_hda_stream *hda_stream; 974 975 /* free position buffer */ 976 if (bus->posbuf.area) 977 snd_dma_free_pages(&bus->posbuf); 978 979 /* free CORB/RIRB buffer - only used for HDaudio codecs */ 980 if (bus->rb.area) 981 snd_dma_free_pages(&bus->rb); 982 983 list_for_each_entry_safe(s, _s, &bus->stream_list, list) { 984 /* TODO: decouple */ 985 986 /* free bdl buffer */ 987 if (s->bdl.area) 988 snd_dma_free_pages(&s->bdl); 989 list_del(&s->list); 990 hext_stream = stream_to_hdac_ext_stream(s); 991 hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, 992 hext_stream); 993 devm_kfree(sdev->dev, hda_stream); 994 } 995 } 996 997 snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream, 998 int direction, bool can_sleep) 999 { 1000 struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream); 1001 struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream); 1002 struct snd_sof_dev *sdev = hda_stream->sdev; 1003 snd_pcm_uframes_t pos; 1004 1005 switch (sof_hda_position_quirk) { 1006 case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY: 1007 /* 1008 * This legacy code, inherited from the Skylake driver, 1009 * mixes DPIB registers and DPIB DDR updates and 1010 * does not seem to follow any known hardware recommendations. 1011 * It's not clear e.g. why there is a different flow 1012 * for capture and playback, the only information that matters is 1013 * what traffic class is used, and on all SOF-enabled platforms 1014 * only VC0 is supported so the work-around was likely not necessary 1015 * and quite possibly wrong. 1016 */ 1017 1018 /* DPIB/posbuf position mode: 1019 * For Playback, Use DPIB register from HDA space which 1020 * reflects the actual data transferred. 1021 * For Capture, Use the position buffer for pointer, as DPIB 1022 * is not accurate enough, its update may be completed 1023 * earlier than the data written to DDR. 1024 */ 1025 if (direction == SNDRV_PCM_STREAM_PLAYBACK) { 1026 pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 1027 AZX_REG_VS_SDXDPIB_XBASE + 1028 (AZX_REG_VS_SDXDPIB_XINTERVAL * 1029 hstream->index)); 1030 } else { 1031 /* 1032 * For capture stream, we need more workaround to fix the 1033 * position incorrect issue: 1034 * 1035 * 1. Wait at least 20us before reading position buffer after 1036 * the interrupt generated(IOC), to make sure position update 1037 * happens on frame boundary i.e. 20.833uSec for 48KHz. 1038 * 2. Perform a dummy Read to DPIB register to flush DMA 1039 * position value. 1040 * 3. Read the DMA Position from posbuf. Now the readback 1041 * value should be >= period boundary. 1042 */ 1043 if (can_sleep) 1044 usleep_range(20, 21); 1045 1046 snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 1047 AZX_REG_VS_SDXDPIB_XBASE + 1048 (AZX_REG_VS_SDXDPIB_XINTERVAL * 1049 hstream->index)); 1050 pos = snd_hdac_stream_get_pos_posbuf(hstream); 1051 } 1052 break; 1053 case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS: 1054 /* 1055 * In case VC1 traffic is disabled this is the recommended option 1056 */ 1057 pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, 1058 AZX_REG_VS_SDXDPIB_XBASE + 1059 (AZX_REG_VS_SDXDPIB_XINTERVAL * 1060 hstream->index)); 1061 break; 1062 case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE: 1063 /* 1064 * This is the recommended option when VC1 is enabled. 1065 * While this isn't needed for SOF platforms it's added for 1066 * consistency and debug. 1067 */ 1068 pos = snd_hdac_stream_get_pos_posbuf(hstream); 1069 break; 1070 default: 1071 dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n", 1072 sof_hda_position_quirk); 1073 pos = 0; 1074 break; 1075 } 1076 1077 if (pos >= hstream->bufsize) 1078 pos = 0; 1079 1080 return pos; 1081 } 1082 1083 #define merge_u64(u32_u, u32_l) (((u64)(u32_u) << 32) | (u32_l)) 1084 1085 /** 1086 * hda_dsp_get_stream_llp - Retrieve the LLP (Linear Link Position) of the stream 1087 * @sdev: SOF device 1088 * @component: ASoC component 1089 * @substream: PCM substream 1090 * 1091 * Returns the raw Linear Link Position value 1092 */ 1093 u64 hda_dsp_get_stream_llp(struct snd_sof_dev *sdev, 1094 struct snd_soc_component *component, 1095 struct snd_pcm_substream *substream) 1096 { 1097 struct hdac_stream *hstream = substream->runtime->private_data; 1098 struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream); 1099 u32 llp_l, llp_u; 1100 1101 /* 1102 * The pplc_addr have been calculated during probe in 1103 * hda_dsp_stream_init(): 1104 * pplc_addr = sdev->bar[HDA_DSP_PP_BAR] + 1105 * SOF_HDA_PPLC_BASE + 1106 * SOF_HDA_PPLC_MULTI * total_stream + 1107 * SOF_HDA_PPLC_INTERVAL * stream_index 1108 * 1109 * Use this pre-calculated address to avoid repeated re-calculation. 1110 */ 1111 llp_l = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPL); 1112 llp_u = readl(hext_stream->pplc_addr + AZX_REG_PPLCLLPU); 1113 1114 /* Compensate the LLP counter with the saved offset */ 1115 if (hext_stream->pplcllpl || hext_stream->pplcllpu) 1116 return merge_u64(llp_u, llp_l) - 1117 merge_u64(hext_stream->pplcllpu, hext_stream->pplcllpl); 1118 1119 return merge_u64(llp_u, llp_l); 1120 } 1121 1122 /** 1123 * hda_dsp_get_stream_ldp - Retrieve the LDP (Linear DMA Position) of the stream 1124 * @sdev: SOF device 1125 * @component: ASoC component 1126 * @substream: PCM substream 1127 * 1128 * Returns the raw Linear Link Position value 1129 */ 1130 u64 hda_dsp_get_stream_ldp(struct snd_sof_dev *sdev, 1131 struct snd_soc_component *component, 1132 struct snd_pcm_substream *substream) 1133 { 1134 struct hdac_stream *hstream = substream->runtime->private_data; 1135 struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream); 1136 u32 ldp_l, ldp_u; 1137 1138 /* 1139 * The pphc_addr have been calculated during probe in 1140 * hda_dsp_stream_init(): 1141 * pphc_addr = sdev->bar[HDA_DSP_PP_BAR] + 1142 * SOF_HDA_PPHC_BASE + 1143 * SOF_HDA_PPHC_INTERVAL * stream_index 1144 * 1145 * Use this pre-calculated address to avoid repeated re-calculation. 1146 */ 1147 ldp_l = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPL); 1148 ldp_u = readl(hext_stream->pphc_addr + AZX_REG_PPHCLDPU); 1149 1150 return ((u64)ldp_u << 32) | ldp_l; 1151 } 1152