// SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
//
// This file is provided under a dual BSD/GPLv2 license. When using or
// redistributing this file, you may do so under either license.
//
// Copyright(c) 2018 Intel Corporation. All rights reserved.
//
// Authors: Liam Girdwood <liam.r.girdwood@linux.intel.com>
//	    Ranjani Sridharan <ranjani.sridharan@linux.intel.com>
//	    Rander Wang <rander.wang@intel.com>
//	    Keyon Jie <yang.jie@linux.intel.com>
//

/*
 * Hardware interface for generic Intel audio DSP HDA IP
 */

#include <sound/hdaudio_ext.h>
#include <sound/hda_register.h>
#include <sound/sof.h>
#include <trace/events/sof_intel.h>
#include "../ops.h"
#include "../sof-audio.h"
#include "hda.h"

#define HDA_LTRP_GB_VALUE_US	95

static inline const char *hda_hstream_direction_str(struct hdac_stream *hstream)
{
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK)
		return "Playback";
	else
		return "Capture";
}

static char *hda_hstream_dbg_get_stream_info_str(struct hdac_stream *hstream)
{
	struct snd_soc_pcm_runtime *rtd;

	if (hstream->substream)
		rtd = asoc_substream_to_rtd(hstream->substream);
	else if (hstream->cstream)
		rtd = hstream->cstream->private_data;
	else
		/* Non audio DMA user, like dma-trace */
		return kasprintf(GFP_KERNEL, "-- (%s, stream_tag: %u)",
				 hda_hstream_direction_str(hstream),
				 hstream->stream_tag);

	return kasprintf(GFP_KERNEL, "dai_link \"%s\" (%s, stream_tag: %u)",
			 rtd->dai_link->name, hda_hstream_direction_str(hstream),
			 hstream->stream_tag);
}

/*
 * set up one of BDL entries for a stream
 */
static int hda_setup_bdle(struct snd_sof_dev *sdev,
			  struct snd_dma_buffer *dmab,
			  struct hdac_stream *hstream,
			  struct sof_intel_dsp_bdl **bdlp,
			  int offset, int size, int ioc)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_dsp_bdl *bdl = *bdlp;

	while (size > 0) {
		dma_addr_t addr;
		int chunk;

		if (hstream->frags >= HDA_DSP_MAX_BDL_ENTRIES) {
			dev_err(sdev->dev, "error: stream frags exceeded\n");
			return -EINVAL;
		}

		addr = snd_sgbuf_get_addr(dmab, offset);
		/* program BDL addr */
		bdl->addr_l = cpu_to_le32(lower_32_bits(addr));
		bdl->addr_h = cpu_to_le32(upper_32_bits(addr));
		/* program BDL size */
		chunk = snd_sgbuf_get_chunk_size(dmab, offset, size);
		/* one BDLE should not cross 4K boundary */
		if (bus->align_bdle_4k) {
			u32 remain = 0x1000 - (offset & 0xfff);

			if (chunk > remain)
				chunk = remain;
		}
		bdl->size = cpu_to_le32(chunk);
		/* only program IOC when the whole segment is processed */
		size -= chunk;
		bdl->ioc = (size || !ioc) ? 0 : cpu_to_le32(0x01);
		bdl++;
		hstream->frags++;
		offset += chunk;
	}

	*bdlp = bdl;
	return offset;
}

/*
 * set up Buffer Descriptor List (BDL) for host memory transfer
 * BDL describes the location of the individual buffers and is little endian.
 */
int hda_dsp_stream_setup_bdl(struct snd_sof_dev *sdev,
			     struct snd_dma_buffer *dmab,
			     struct hdac_stream *hstream)
{
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct sof_intel_dsp_bdl *bdl;
	int i, offset, period_bytes, periods;
	int remain, ioc;

	period_bytes = hstream->period_bytes;
	dev_dbg(sdev->dev, "period_bytes:0x%x\n", period_bytes);
	if (!period_bytes)
		period_bytes = hstream->bufsize;

	periods = hstream->bufsize / period_bytes;

	dev_dbg(sdev->dev, "periods:%d\n", periods);

	remain = hstream->bufsize % period_bytes;
	if (remain)
		periods++;

	/* program the initial BDL entries */
	bdl = (struct sof_intel_dsp_bdl *)hstream->bdl.area;
	offset = 0;
	hstream->frags = 0;

	/*
	 * set IOC if position IPC is not used
	 * and period wakeup is needed.
	 */
	ioc = hda->no_ipc_position ?
	      !hstream->no_period_wakeup : 0;

	for (i = 0; i < periods; i++) {
		if (i == (periods - 1) && remain)
			/* set the last small entry */
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						remain, 0);
		else
			offset = hda_setup_bdle(sdev, dmab,
						hstream, &bdl, offset,
						period_bytes, ioc);
	}

	return offset;
}

int hda_dsp_stream_spib_config(struct snd_sof_dev *sdev,
			       struct hdac_ext_stream *hext_stream,
			       int enable, u32 size)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	u32 mask;

	if (!sdev->bar[HDA_DSP_SPIB_BAR]) {
		dev_err(sdev->dev, "error: address of spib capability is NULL\n");
		return -EINVAL;
	}

	mask = (1 << hstream->index);

	/* enable/disable SPIB for the stream */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_SPIB_BAR,
				SOF_HDA_ADSP_REG_CL_SPBFIFO_SPBFCCTL, mask,
				enable << hstream->index);

	/* set the SPIB (software position in buffer) value */
	sof_io_write(sdev, hstream->spib_addr, size);

	return 0;
}

/* get next unused stream */
struct hdac_ext_stream *
hda_dsp_stream_get(struct snd_sof_dev *sdev, int direction, u32 flags)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream = NULL;
	struct hdac_stream *s;

	spin_lock_irq(&bus->reg_lock);

	/* get an unused stream */
	list_for_each_entry(s, &bus->stream_list, list) {
		if (s->direction == direction && !s->opened) {
			hext_stream = stream_to_hdac_ext_stream(s);
			hda_stream = container_of(hext_stream,
						  struct sof_intel_hda_stream,
						  hext_stream);
			/* check if the host DMA channel is reserved */
			if (hda_stream->host_reserved)
				continue;

			s->opened = true;
			break;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* stream found? */
	if (!hext_stream) {
		dev_err(sdev->dev, "error: no free %s streams\n",
			direction == SNDRV_PCM_STREAM_PLAYBACK ?
			"playback" : "capture");
		return hext_stream;
	}

	hda_stream->flags = flags;

	/*
	 * Prevent DMI Link L1 entry for streams that don't support it.
	 * Workaround to address a known issue with host DMA that results
	 * in xruns during pause/release in capture scenarios. This is not
	 * needed for the ACE IP.
	 */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 &&
	    !(flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, 0);
		hda->l1_disabled = true;
	}

	return hext_stream;
}

/* free a stream */
int hda_dsp_stream_put(struct snd_sof_dev *sdev, int direction, int stream_tag)
{
	const struct sof_intel_dsp_desc *chip_info = get_chip_info(sdev->pdata);
	struct sof_intel_hda_dev *hda = sdev->pdata->hw_pdata;
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct sof_intel_hda_stream *hda_stream;
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *s;
	bool dmi_l1_enable = true;
	bool found = false;

	spin_lock_irq(&bus->reg_lock);

	/*
	 * close stream matching the stream tag and check if there are any open streams
	 * that are DMI L1 incompatible.
	 */
	list_for_each_entry(s, &bus->stream_list, list) {
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream, hext_stream);

		if (!s->opened)
			continue;

		if (s->direction == direction && s->stream_tag == stream_tag) {
			s->opened = false;
			found = true;
		} else if (!(hda_stream->flags & SOF_HDA_STREAM_DMI_L1_COMPATIBLE)) {
			dmi_l1_enable = false;
		}
	}

	spin_unlock_irq(&bus->reg_lock);

	/* Enable DMI L1 if permitted */
	if (chip_info->hw_ip_version < SOF_INTEL_ACE_1_0 && dmi_l1_enable) {
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_EM2,
					HDA_VS_INTEL_EM2_L1SEN, HDA_VS_INTEL_EM2_L1SEN);
		hda->l1_disabled = false;
	}

	if (!found) {
		dev_err(sdev->dev, "%s: stream_tag %d not opened!\n",
			__func__, stream_tag);
		return -ENODEV;
	}

	return 0;
}
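
/*
 * Typical usage (sketch; the exact call sites are in the PCM and compress
 * handling of this driver):
 *
 *	hext_stream = hda_dsp_stream_get(sdev, substream->stream, flags);
 *	...
 *	hda_dsp_stream_put(sdev, substream->stream, hstream->stream_tag);
 *
 * hda_dsp_stream_get() marks the hdac_stream as opened and may disable
 * DMI L1; hda_dsp_stream_put() releases it and re-enables DMI L1 once no
 * L1-incompatible stream is left open.
 */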

static int hda_dsp_stream_reset(struct snd_sof_dev *sdev, struct hdac_stream *hstream)
{
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int timeout = HDA_DSP_STREAM_RESET_TIMEOUT;
	u32 val;

	/* enter stream reset */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST,
				SOF_STREAM_SD_OFFSET_CRST);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if (val & SOF_STREAM_SD_OFFSET_CRST)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream reset\n");
		return -ETIMEDOUT;
	}

	timeout = HDA_DSP_STREAM_RESET_TIMEOUT;

	/* exit stream reset and wait to read a zero before reading any other register */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset, SOF_STREAM_SD_OFFSET_CRST, 0x0);

	/* wait for hardware to report that stream is out of reset */
	udelay(3);
	do {
		val = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, sd_offset);
		if ((val & SOF_STREAM_SD_OFFSET_CRST) == 0)
			break;
	} while (--timeout);
	if (timeout == 0) {
		dev_err(sdev->dev, "timeout waiting for stream to exit reset\n");
		return -ETIMEDOUT;
	}

	return 0;
}

int hda_dsp_stream_trigger(struct snd_sof_dev *sdev,
			   struct hdac_ext_stream *hext_stream, int cmd)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	int ret = 0;
	u32 run;

	/* cmd must be for audio stream */
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_START:
		if (hstream->running)
			break;

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTCTL,
					1 << hstream->index,
					1 << hstream->index);

		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK);

		ret = snd_sof_dsp_read_poll_timeout(sdev,
						    HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    ((run & dma_start) == dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0)
			hstream->running = true;

		break;
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		if (!sdev->dspless_mode_selected)
			break;
		fallthrough;
	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
		snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
					sd_offset,
					SOF_HDA_SD_CTL_DMA_START |
					SOF_HDA_CL_DMA_SD_INT_MASK, 0x0);

		ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
						    sd_offset, run,
						    !(run & dma_start),
						    HDA_DSP_REG_POLL_INTERVAL_US,
						    HDA_DSP_STREAM_RUN_TIMEOUT);

		if (ret >= 0) {
			snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
					  sd_offset + SOF_HDA_ADSP_REG_SD_STS,
					  SOF_HDA_CL_DMA_SD_INT_MASK);

			hstream->running = false;
			snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
						SOF_HDA_INTCTL,
						1 << hstream->index, 0x0);
		}
		break;
	default:
		dev_err(sdev->dev, "error: unknown command: %d\n", cmd);
		return -EINVAL;
	}

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: cmd %d on %s: timeout on STREAM_SD_OFFSET read\n",
			__func__, cmd, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
	}

	return ret;
}

/* minimal recommended programming for ICCMAX stream */
int hda_dsp_iccmax_stream_hw_params(struct snd_sof_dev *sdev, struct hdac_ext_stream *hext_stream,
				    struct snd_dma_buffer *dmab,
				    struct snd_pcm_hw_params *params)
{
	struct hdac_stream *hstream = &hext_stream->hstream;
	int sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	int ret;
	u32 mask = 0x1 << hstream->index;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* decouple host and link DMA, enable DSP features */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
				mask, mask);

	/* Follow HW recommendation to set the guardband value to 95us during FW boot */
	snd_sof_dsp_update8(sdev, HDA_DSP_HDA_BAR, HDA_VS_INTEL_LTRP,
			    HDA_VS_INTEL_LTRP_GB_MASK, HDA_LTRP_GB_VALUE_US);

	/* start DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_SD_CTL_DMA_START, SOF_HDA_SD_CTL_DMA_START);

	return 0;
}

/*
 * prepare for common hdac registers settings, for both code loader
 * and normal stream.
 */
int hda_dsp_stream_hw_params(struct snd_sof_dev *sdev,
			     struct hdac_ext_stream *hext_stream,
			     struct snd_dma_buffer *dmab,
			     struct snd_pcm_hw_params *params)
{
	const struct sof_intel_dsp_desc *chip = get_chip_info(sdev->pdata);
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *hstream;
	int sd_offset, ret;
	u32 dma_start = SOF_HDA_SD_CTL_DMA_START;
	u32 mask;
	u32 run;

	if (!hext_stream) {
		dev_err(sdev->dev, "error: no stream available\n");
		return -ENODEV;
	}

	if (!dmab) {
		dev_err(sdev->dev, "error: no dma buffer allocated!\n");
		return -ENODEV;
	}

	hstream = &hext_stream->hstream;
	sd_offset = SOF_STREAM_SD_OFFSET(hstream);
	mask = BIT(hstream->index);

	/* decouple host and link DMA if the DSP is used */
	if (!sdev->dspless_mode_selected)
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read1\n",
			__func__, stream_name ? stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* stream reset */
	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (hstream->posbuf)
		*hstream->posbuf = 0;

	/* reset BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  0x0);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  0x0);

	/* clear stream status */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK |
				SOF_HDA_SD_CTL_DMA_START, 0);

	ret = snd_sof_dsp_read_poll_timeout(sdev, HDA_DSP_HDA_BAR,
					    sd_offset, run,
					    !(run & dma_start),
					    HDA_DSP_REG_POLL_INTERVAL_US,
					    HDA_DSP_STREAM_RUN_TIMEOUT);

	if (ret < 0) {
		char *stream_name = hda_hstream_dbg_get_stream_info_str(hstream);

		dev_err(sdev->dev,
			"%s: on %s: timeout on STREAM_SD_OFFSET read2\n",
			__func__, stream_name ?
			stream_name : "unknown stream");
		kfree(stream_name);
		return ret;
	}

	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_STS,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	hstream->frags = 0;

	ret = hda_dsp_stream_setup_bdl(sdev, dmab, hstream);
	if (ret < 0) {
		dev_err(sdev->dev, "error: set up of BDL failed\n");
		return ret;
	}

	/* program stream tag to set up stream descriptor for DMA */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_SD_CTL_STREAM_TAG_MASK,
				hstream->stream_tag <<
				SOF_HDA_CL_SD_CTL_STREAM_TAG_SHIFT);

	/* program cyclic buffer length */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_CBL,
			  hstream->bufsize);

	/*
	 * Recommended hardware programming sequence for HDAudio DMA format
	 * on earlier platforms - this is not needed on newer platforms
	 *
	 * 1. Put DMA into coupled mode by clearing PPCTL.PROCEN bit
	 *    for corresponding stream index before the time of writing
	 *    format to SDxFMT register.
	 * 2. Write SDxFMT
	 * 3. Set PPCTL.PROCEN bit for corresponding stream index to
	 *    enable decoupled mode
	 */

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* couple host and link DMA, disable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, 0);

	/* program stream format */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset +
				SOF_HDA_ADSP_REG_SD_FORMAT,
				0xffff, hstream->format_val);

	if (!sdev->dspless_mode_selected && (chip->quirks & SOF_INTEL_PROCEN_FMT_QUIRK))
		/* decouple host and link DMA, enable DSP features */
		snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR, SOF_HDA_REG_PP_PPCTL,
					mask, mask);

	/* program last valid index */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR,
				sd_offset + SOF_HDA_ADSP_REG_SD_LVI,
				0xffff, (hstream->frags - 1));

	/* program BDL address */
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPL,
			  (u32)hstream->bdl.addr);
	snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR,
			  sd_offset + SOF_HDA_ADSP_REG_SD_BDLPU,
			  upper_32_bits(hstream->bdl.addr));

	/* enable position buffer, if needed */
	if (bus->use_posbuf && bus->posbuf.addr &&
	    !(snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE)
	      & SOF_HDA_ADSP_DPLBASE_ENABLE)) {
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPUBASE,
				  upper_32_bits(bus->posbuf.addr));
		snd_sof_dsp_write(sdev, HDA_DSP_HDA_BAR, SOF_HDA_ADSP_DPLBASE,
				  (u32)bus->posbuf.addr |
				  SOF_HDA_ADSP_DPLBASE_ENABLE);
	}

	/* set interrupt enable bits */
	snd_sof_dsp_update_bits(sdev, HDA_DSP_HDA_BAR, sd_offset,
				SOF_HDA_CL_DMA_SD_INT_MASK,
				SOF_HDA_CL_DMA_SD_INT_MASK);

	/* read FIFO size */
	if (hstream->direction == SNDRV_PCM_STREAM_PLAYBACK) {
		hstream->fifo_size =
			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 sd_offset +
					 SOF_HDA_ADSP_REG_SD_FIFOSIZE);
		hstream->fifo_size &= 0xffff;
		hstream->fifo_size += 1;
	} else {
		hstream->fifo_size = 0;
	}

	return ret;
}

int hda_dsp_stream_hw_free(struct snd_sof_dev *sdev,
			   struct snd_pcm_substream *substream)
{
	struct hdac_stream *hstream = substream->runtime->private_data;
	struct hdac_ext_stream *hext_stream = container_of(hstream,
							    struct hdac_ext_stream,
							    hstream);
	int ret;

	ret = hda_dsp_stream_reset(sdev, hstream);
	if (ret < 0)
		return ret;

	if (!sdev->dspless_mode_selected) {
		struct hdac_bus *bus = sof_to_bus(sdev);
		u32 mask = BIT(hstream->index);

		spin_lock_irq(&bus->reg_lock);
		/* couple host and link DMA if link DMA channel is idle */
		if (!hext_stream->link_locked)
			snd_sof_dsp_update_bits(sdev, HDA_DSP_PP_BAR,
						SOF_HDA_REG_PP_PPCTL, mask, 0);
		spin_unlock_irq(&bus->reg_lock);
	}

	hda_dsp_stream_spib_config(sdev, hext_stream, HDA_DSP_SPIB_DISABLE, 0);

	hstream->substream = NULL;

	return 0;
}

bool hda_dsp_check_stream_irq(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool ret = false;
	u32 status;

	/* this function can be called from the IRQ thread, so use spin_lock_irq */
	spin_lock_irq(&bus->reg_lock);

	status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

	trace_sof_intel_hda_dsp_check_stream_irq(sdev, status);

	/* if the register is inaccessible, ignore it */
	if (status != 0xffffffff)
		ret = true;

	spin_unlock_irq(&bus->reg_lock);

	return ret;
}

static void
hda_dsp_compr_bytes_transferred(struct hdac_stream *hstream, int direction)
{
	u64 buffer_size = hstream->bufsize;
	u64 prev_pos, pos, num_bytes;

	div64_u64_rem(hstream->curr_pos, buffer_size, &prev_pos);
	pos = hda_dsp_stream_get_position(hstream, direction, false);

	if (pos < prev_pos)
		num_bytes = (buffer_size - prev_pos) + pos;
	else
		num_bytes = pos - prev_pos;

	hstream->curr_pos += num_bytes;
}

static bool hda_dsp_stream_check(struct hdac_bus *bus, u32 status)
{
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	struct hdac_stream *s;
	bool active = false;
	u32 sd_status;

	list_for_each_entry(s, &bus->stream_list, list) {
		if (status & BIT(s->index) && s->opened) {
			sd_status = readb(s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			trace_sof_intel_hda_dsp_stream_status(bus->dev, s, sd_status);

			writeb(sd_status, s->sd_addr + SOF_HDA_ADSP_REG_SD_STS);

			active = true;
			if ((!s->substream && !s->cstream) ||
			    !s->running ||
			    (sd_status & SOF_HDA_CL_DMA_SD_INT_COMPLETE) == 0)
				continue;

			/* inform ALSA only if the position is not reported via IPC */
			if (s->substream && sof_hda->no_ipc_position) {
				snd_sof_pcm_period_elapsed(s->substream);
			} else if (s->cstream) {
				hda_dsp_compr_bytes_transferred(s, s->cstream->direction);
				snd_compr_fragment_elapsed(s->cstream);
			}
		}
	}

	return active;
}

irqreturn_t hda_dsp_stream_threaded_handler(int irq, void *context)
{
	struct snd_sof_dev *sdev = context;
	struct hdac_bus *bus = sof_to_bus(sdev);
	bool active;
	u32 status;
	int i;

	/*
	 * Loop 10 times to handle missed interrupts caused by
	 * unsolicited responses from the codec
	 */
	for (i = 0, active = true; i < 10 && active; i++) {
		spin_lock_irq(&bus->reg_lock);

		status = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_INTSTS);

		/* check streams */
		active = hda_dsp_stream_check(bus, status);

		/* check and clear RIRB interrupt */
		if (status & AZX_INT_CTRL_EN) {
			active |= hda_codec_check_rirb_status(sdev);
		}
		spin_unlock_irq(&bus->reg_lock);
	}

	return IRQ_HANDLED;
}

int hda_dsp_stream_init(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_ext_stream *hext_stream;
	struct hdac_stream *hstream;
	struct pci_dev *pci = to_pci_dev(sdev->dev);
	struct sof_intel_hda_dev *sof_hda = bus_to_sof_hda(bus);
	int sd_offset;
	int i, num_playback, num_capture, num_total, ret;
	u32 gcap;

	gcap = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR, SOF_HDA_GCAP);
	dev_dbg(sdev->dev, "hda global caps = 0x%x\n", gcap);

	/*
	 * get stream count from GCAP: bits 11:8 hold the number of input
	 * (capture) streams and bits 15:12 the number of output (playback)
	 * streams, as defined by the HDAudio specification
	 */
	num_capture = (gcap >> 8) & 0x0f;
	num_playback = (gcap >> 12) & 0x0f;
	num_total = num_playback + num_capture;

	dev_dbg(sdev->dev, "detected %d playback and %d capture streams\n",
		num_playback, num_capture);

	if (num_playback >= SOF_HDA_PLAYBACK_STREAMS) {
		dev_err(sdev->dev, "error: too many playback streams %d\n",
			num_playback);
		return -EINVAL;
	}

	if (num_capture >= SOF_HDA_CAPTURE_STREAMS) {
		dev_err(sdev->dev, "error: too many capture streams %d\n",
			num_capture);
		return -EINVAL;
	}

	/*
	 * mem alloc for the position buffer
	 * TODO: check position buffer update
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  SOF_HDA_DPIB_ENTRY_SIZE * num_total,
				  &bus->posbuf);
	if (ret < 0) {
		dev_err(sdev->dev, "error: posbuffer dma alloc failed\n");
		return -ENOMEM;
	}

	/*
	 * mem alloc for the CORB/RIRB ringbuffers - this will be used only for
	 * HDAudio codecs
	 */
	ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
				  PAGE_SIZE, &bus->rb);
	if (ret < 0) {
		dev_err(sdev->dev, "error: RB alloc failed\n");
		return -ENOMEM;
	}

	/* create capture streams */
	for (i = 0; i < num_capture; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_CAPTURE;

		/* memory alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}
		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}
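
	/*
	 * Stream tags are per-direction and 1-based: the capture streams
	 * created above use tags 1..num_capture, while the playback streams
	 * created below restart at tag 1 (i - num_capture + 1). The stream
	 * index 'i' keeps counting across both directions to match the
	 * controller register layout.
	 */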

	/* create playback streams */
	for (i = num_capture; i < num_total; i++) {
		struct sof_intel_hda_stream *hda_stream;

		hda_stream = devm_kzalloc(sdev->dev, sizeof(*hda_stream),
					  GFP_KERNEL);
		if (!hda_stream)
			return -ENOMEM;

		hda_stream->sdev = sdev;

		hext_stream = &hda_stream->hext_stream;

		if (sdev->bar[HDA_DSP_PP_BAR]) {
			hext_stream->pphc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPHC_BASE + SOF_HDA_PPHC_INTERVAL * i;

			hext_stream->pplc_addr = sdev->bar[HDA_DSP_PP_BAR] +
				SOF_HDA_PPLC_BASE + SOF_HDA_PPLC_MULTI * num_total +
				SOF_HDA_PPLC_INTERVAL * i;
		}

		hstream = &hext_stream->hstream;

		/* do we support SPIB */
		if (sdev->bar[HDA_DSP_SPIB_BAR]) {
			hstream->spib_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_SPIB;

			hstream->fifo_addr = sdev->bar[HDA_DSP_SPIB_BAR] +
				SOF_HDA_SPIB_BASE + SOF_HDA_SPIB_INTERVAL * i +
				SOF_HDA_SPIB_MAXFIFO;
		}

		hstream->bus = bus;
		hstream->sd_int_sta_mask = 1 << i;
		hstream->index = i;
		sd_offset = SOF_STREAM_SD_OFFSET(hstream);
		hstream->sd_addr = sdev->bar[HDA_DSP_HDA_BAR] + sd_offset;
		hstream->stream_tag = i - num_capture + 1;
		hstream->opened = false;
		hstream->running = false;
		hstream->direction = SNDRV_PCM_STREAM_PLAYBACK;

		/* mem alloc for stream BDL */
		ret = snd_dma_alloc_pages(SNDRV_DMA_TYPE_DEV, &pci->dev,
					  HDA_DSP_BDL_SIZE, &hstream->bdl);
		if (ret < 0) {
			dev_err(sdev->dev, "error: stream bdl dma alloc failed\n");
			return -ENOMEM;
		}

		hstream->posbuf = (__le32 *)(bus->posbuf.area +
			(hstream->index) * 8);

		list_add_tail(&hstream->list, &bus->stream_list);
	}

	/* store total stream count (playback + capture) from GCAP */
	sof_hda->stream_max = num_total;

	return 0;
}

void hda_dsp_stream_free(struct snd_sof_dev *sdev)
{
	struct hdac_bus *bus = sof_to_bus(sdev);
	struct hdac_stream *s, *_s;
	struct hdac_ext_stream *hext_stream;
	struct sof_intel_hda_stream *hda_stream;

	/* free position buffer */
	if (bus->posbuf.area)
		snd_dma_free_pages(&bus->posbuf);

	/* free CORB/RIRB buffer - only used for HDaudio codecs */
	if (bus->rb.area)
		snd_dma_free_pages(&bus->rb);

	list_for_each_entry_safe(s, _s, &bus->stream_list, list) {
		/* TODO: decouple */

		/* free bdl buffer */
		if (s->bdl.area)
			snd_dma_free_pages(&s->bdl);
		list_del(&s->list);
		hext_stream = stream_to_hdac_ext_stream(s);
		hda_stream = container_of(hext_stream, struct sof_intel_hda_stream,
					  hext_stream);
		devm_kfree(sdev->dev, hda_stream);
	}
}
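
/*
 * Position readback below supports three quirk modes: the legacy Skylake
 * mix of DPIB register reads and position-buffer reads, plain DPIB
 * register reads (recommended when only VC0 traffic is used) and
 * position-buffer (DPIB DDR update) reads (recommended when VC1 is
 * enabled). sof_hda_position_quirk itself is selected in hda.c.
 */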

snd_pcm_uframes_t hda_dsp_stream_get_position(struct hdac_stream *hstream,
					      int direction, bool can_sleep)
{
	struct hdac_ext_stream *hext_stream = stream_to_hdac_ext_stream(hstream);
	struct sof_intel_hda_stream *hda_stream = hstream_to_sof_hda_stream(hext_stream);
	struct snd_sof_dev *sdev = hda_stream->sdev;
	snd_pcm_uframes_t pos;

	switch (sof_hda_position_quirk) {
	case SOF_HDA_POSITION_QUIRK_USE_SKYLAKE_LEGACY:
		/*
		 * This legacy code, inherited from the Skylake driver,
		 * mixes DPIB registers and DPIB DDR updates and
		 * does not seem to follow any known hardware recommendations.
		 * It's not clear, e.g., why there is a different flow for
		 * capture and playback; the only information that matters is
		 * which traffic class is used, and on all SOF-enabled
		 * platforms only VC0 is supported, so the workaround was
		 * likely not necessary and quite possibly wrong.
		 */

		/* DPIB/posbuf position mode:
		 * For Playback, use the DPIB register from HDA space, which
		 * reflects the actual data transferred.
		 * For Capture, use the position buffer for the pointer, as
		 * DPIB is not accurate enough; its update may complete
		 * earlier than the data is written to DDR.
		 */
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					       AZX_REG_VS_SDXDPIB_XBASE +
					       (AZX_REG_VS_SDXDPIB_XINTERVAL *
						hstream->index));
		} else {
			/*
			 * For a capture stream, we need a further workaround
			 * to fix the incorrect position:
			 *
			 * 1. Wait at least 20us before reading the position
			 *    buffer after the interrupt is generated (IOC), to
			 *    make sure the position update happens on a frame
			 *    boundary, i.e. 20.833us for 48kHz.
			 * 2. Perform a dummy read of the DPIB register to
			 *    flush the DMA position value.
			 * 3. Read the DMA position from the posbuf. Now the
			 *    readback value should be >= the period boundary.
			 */
			if (can_sleep)
				usleep_range(20, 21);

			snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
					 AZX_REG_VS_SDXDPIB_XBASE +
					 (AZX_REG_VS_SDXDPIB_XINTERVAL *
					  hstream->index));
			pos = snd_hdac_stream_get_pos_posbuf(hstream);
		}
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_REGISTERS:
		/*
		 * In case VC1 traffic is disabled this is the recommended option
		 */
		pos = snd_sof_dsp_read(sdev, HDA_DSP_HDA_BAR,
				       AZX_REG_VS_SDXDPIB_XBASE +
				       (AZX_REG_VS_SDXDPIB_XINTERVAL *
					hstream->index));
		break;
	case SOF_HDA_POSITION_QUIRK_USE_DPIB_DDR_UPDATE:
		/*
		 * This is the recommended option when VC1 is enabled.
		 * While this isn't needed for SOF platforms it's added for
		 * consistency and debug.
		 */
		pos = snd_hdac_stream_get_pos_posbuf(hstream);
		break;
	default:
		dev_err_once(sdev->dev, "hda_position_quirk value %d not supported\n",
			     sof_hda_position_quirk);
		pos = 0;
		break;
	}

	if (pos >= hstream->bufsize)
		pos = 0;

	return pos;
}