/*
 * AMD ALSA SoC PCM Driver for ACP 2.x
 *
 * Copyright 2014-2015 Advanced Micro Devices, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 */

#include <linux/module.h>
#include <linux/delay.h>
#include <linux/io.h>
#include <linux/sizes.h>
#include <linux/pm_runtime.h>

#include <sound/soc.h>

#include "acp.h"

#define PLAYBACK_MIN_NUM_PERIODS    2
#define PLAYBACK_MAX_NUM_PERIODS    2
#define PLAYBACK_MAX_PERIOD_SIZE    16384
#define PLAYBACK_MIN_PERIOD_SIZE    1024
#define CAPTURE_MIN_NUM_PERIODS     2
#define CAPTURE_MAX_NUM_PERIODS     2
#define CAPTURE_MAX_PERIOD_SIZE     16384
#define CAPTURE_MIN_PERIOD_SIZE     1024

#define MAX_BUFFER (PLAYBACK_MAX_PERIOD_SIZE * PLAYBACK_MAX_NUM_PERIODS)
#define MIN_BUFFER MAX_BUFFER

static const struct snd_pcm_hardware acp_pcm_hardware_playback = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 8,
	.rates = SNDRV_PCM_RATE_8000_96000,
	.rate_min = 8000,
	.rate_max = 96000,
	.buffer_bytes_max = PLAYBACK_MAX_NUM_PERIODS * PLAYBACK_MAX_PERIOD_SIZE,
	.period_bytes_min = PLAYBACK_MIN_PERIOD_SIZE,
	.period_bytes_max = PLAYBACK_MAX_PERIOD_SIZE,
	.periods_min = PLAYBACK_MIN_NUM_PERIODS,
	.periods_max = PLAYBACK_MAX_NUM_PERIODS,
};

static const struct snd_pcm_hardware acp_pcm_hardware_capture = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER | SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID | SNDRV_PCM_INFO_BATCH |
		SNDRV_PCM_INFO_PAUSE | SNDRV_PCM_INFO_RESUME,
	.formats = SNDRV_PCM_FMTBIT_S16_LE |
		SNDRV_PCM_FMTBIT_S24_LE | SNDRV_PCM_FMTBIT_S32_LE,
	.channels_min = 1,
	.channels_max = 2,
	.rates = SNDRV_PCM_RATE_8000_48000,
	.rate_min = 8000,
	.rate_max = 48000,
	.buffer_bytes_max = CAPTURE_MAX_NUM_PERIODS * CAPTURE_MAX_PERIOD_SIZE,
	.period_bytes_min = CAPTURE_MIN_PERIOD_SIZE,
	.period_bytes_max = CAPTURE_MAX_PERIOD_SIZE,
	.periods_min = CAPTURE_MIN_NUM_PERIODS,
	.periods_max = CAPTURE_MAX_NUM_PERIODS,
};

struct audio_drv_data {
	struct snd_pcm_substream *play_stream;
	struct snd_pcm_substream *capture_stream;
	void __iomem *acp_mmio;
};
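
/* ACP register offsets are expressed as 32-bit word indices, so the
 * accessors below scale them by 4 to get a byte offset into the mapped
 * MMIO region.
 */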
static u32 acp_reg_read(void __iomem *acp_mmio, u32 reg)
{
	return readl(acp_mmio + (reg * 4));
}

static void acp_reg_write(u32 val, void __iomem *acp_mmio, u32 reg)
{
	writel(val, acp_mmio + (reg * 4));
}

/* Configure a given dma channel's parameters - enable/disable,
 * number of descriptors, priority
 */
static void config_acp_dma_channel(void __iomem *acp_mmio, u8 ch_num,
				   u16 dscr_strt_idx, u16 num_dscrs,
				   enum acp_dma_priority_level priority_level)
{
	u32 dma_ctrl;

	/* disable the channel run field */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* program a DMA channel with first descriptor to be processed. */
	acp_reg_write((ACP_DMA_DSCR_STRT_IDX_0__DMAChDscrStrtIdx_MASK
			& dscr_strt_idx),
			acp_mmio, mmACP_DMA_DSCR_STRT_IDX_0 + ch_num);

	/* program a DMA channel with the number of descriptors to be
	 * processed in the transfer
	 */
	acp_reg_write(ACP_DMA_DSCR_CNT_0__DMAChDscrCnt_MASK & num_dscrs,
		      acp_mmio, mmACP_DMA_DSCR_CNT_0 + ch_num);

	/* set DMA channel priority */
	acp_reg_write(priority_level, acp_mmio, mmACP_DMA_PRIO_0 + ch_num);
}

/* Initialize a dma descriptor in SRAM based on descriptor information passed */
static void config_dma_descriptor_in_sram(void __iomem *acp_mmio,
					  u16 descr_idx,
					  acp_dma_dscr_transfer_t *descr_info)
{
	u32 sram_offset;

	sram_offset = (descr_idx * sizeof(acp_dma_dscr_transfer_t));

	/* program the source base address. */
	acp_reg_write(sram_offset, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->src, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
	/* program the destination base address. */
	acp_reg_write(sram_offset + 4, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->dest, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

	/* program the number of bytes to be transferred for this descriptor. */
	acp_reg_write(sram_offset + 8, acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
	acp_reg_write(descr_info->xfer_val, acp_mmio, mmACP_SRBM_Targ_Idx_Data);
}
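
/* Each stream direction uses NUM_DSCRS_PER_CHANNEL (two) DMA descriptors
 * per channel, one for each half of the audio buffer. The two halves are
 * transferred alternately, which gives the ping-pong scheme described in
 * acp_dma_prepare().
 */
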
/* Initialize the DMA descriptor information for transfer between
 * system memory <-> ACP SRAM
 */
static void set_acp_sysmem_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction,
					   u32 pte_offset)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH12 + i;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_1_ADDRESS +
					  (size / 2) - (i * (size / 2));
			dmadscr[i].src = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
					 + (pte_offset * SZ_4K) + (i * (size / 2));
			dmadscr[i].xfer_val |=
				(ACP_DMA_ATTRIBUTES_DAGB_ONION_TO_SHAREDMEM << 16) |
				(size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH14 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_5_ADDRESS +
					 (i * (size / 2));
			dmadscr[i].dest = ACP_INTERNAL_APERTURE_WINDOW_0_ADDRESS
					  + (pte_offset * SZ_4K) +
					  (i * (size / 2));
			dmadscr[i].xfer_val |=
				BIT(22) |
				(ACP_DMA_ATTRIBUTES_SHAREDMEM_TO_DAGB_ONION << 16) |
				(size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}

/* Initialize the DMA descriptor information for transfer between
 * ACP SRAM <-> I2S
 */
static void set_acp_to_i2s_dma_descriptors(void __iomem *acp_mmio,
					   u32 size, int direction)
{
	u16 i;
	u16 dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13;
	acp_dma_dscr_transfer_t dmadscr[NUM_DSCRS_PER_CHANNEL];

	for (i = 0; i < NUM_DSCRS_PER_CHANNEL; i++) {
		dmadscr[i].xfer_val = 0;
		if (direction == SNDRV_PCM_STREAM_PLAYBACK) {
			dma_dscr_idx = PLAYBACK_START_DMA_DESCR_CH13 + i;
			dmadscr[i].src = ACP_SHARED_RAM_BANK_1_ADDRESS +
					 (i * (size / 2));
			/* dmadscr[i].dest is unused by hardware. */
			dmadscr[i].dest = 0;
			dmadscr[i].xfer_val |= BIT(22) | (TO_ACP_I2S_1 << 16) |
					       (size / 2);
		} else {
			dma_dscr_idx = CAPTURE_START_DMA_DESCR_CH15 + i;
			/* dmadscr[i].src is unused by hardware. */
			dmadscr[i].src = 0;
			dmadscr[i].dest = ACP_SHARED_RAM_BANK_5_ADDRESS +
					  (i * (size / 2));
			dmadscr[i].xfer_val |= BIT(22) |
					       (FROM_ACP_I2S_1 << 16) | (size / 2);
		}
		config_dma_descriptor_in_sram(acp_mmio, dma_dscr_idx,
					      &dmadscr[i]);
	}
	/* Configure the DMA channel with the above descriptors */
	if (direction == SNDRV_PCM_STREAM_PLAYBACK)
		config_acp_dma_channel(acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
	else
		config_acp_dma_channel(acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL,
				       ACP_DMA_PRIORITY_LEVEL_NORMAL);
}
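
/* Each page table entry in ACP SRAM takes 8 bytes: the low 32 bits of the
 * page's physical address followed by the upper bits, with bit 31 of the
 * upper word used as the page-enable flag.
 */
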
/* Create page table entries in ACP SRAM for the allocated memory */
static void acp_pte_config(void __iomem *acp_mmio, struct page *pg,
			   u16 num_of_pages, u32 pte_offset)
{
	u16 page_idx;
	u64 addr;
	u32 low;
	u32 high;
	u32 offset;

	offset = ACP_DAGB_GRP_SRBM_SRAM_BASE_OFFSET + (pte_offset * 8);
	for (page_idx = 0; page_idx < (num_of_pages); page_idx++) {
		/* Load the low address of page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8)),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);
		addr = page_to_phys(pg);

		low = lower_32_bits(addr);
		high = upper_32_bits(addr);

		acp_reg_write(low, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Load the high address of page into ACP SRAM through SRBM */
		acp_reg_write((offset + (page_idx * 8) + 4),
			      acp_mmio, mmACP_SRBM_Targ_Idx_Addr);

		/* page enable in ACP */
		high |= BIT(31);
		acp_reg_write(high, acp_mmio, mmACP_SRBM_Targ_Idx_Data);

		/* Move to next physically contiguous page */
		pg++;
	}
}

static void config_acp_dma(void __iomem *acp_mmio,
			   struct audio_substream_data *audio_config)
{
	u32 pte_offset;

	if (audio_config->direction == SNDRV_PCM_STREAM_PLAYBACK)
		pte_offset = ACP_PLAYBACK_PTE_OFFSET;
	else
		pte_offset = ACP_CAPTURE_PTE_OFFSET;

	acp_pte_config(acp_mmio, audio_config->pg, audio_config->num_of_pages,
		       pte_offset);

	/* Configure System memory <-> ACP SRAM DMA descriptors */
	set_acp_sysmem_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction, pte_offset);

	/* Configure ACP SRAM <-> I2S DMA descriptors */
	set_acp_to_i2s_dma_descriptors(acp_mmio, audio_config->size,
				       audio_config->direction);
}

/* Start a given DMA channel transfer */
static void acp_dma_start(void __iomem *acp_mmio,
			  u16 ch_num, bool is_circular)
{
	u32 dma_ctrl;

	/* read the current dma control register settings */
	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* Invalidating the DAGB cache */
	acp_reg_write(1, acp_mmio, mmACP_DAGB_ATU_CTRL);

	/* configure the DMA channel and start the DMA transfer:
	 * set the dmachrun bit to start the transfer and enable the
	 * interrupt on completion of the dma transfer
	 */
	dma_ctrl |= ACP_DMA_CNTL_0__DMAChRun_MASK;

	switch (ch_num) {
	case ACP_TO_I2S_DMA_CH_NUM:
	case ACP_TO_SYSRAM_CH_NUM:
	case I2S_TO_ACP_DMA_CH_NUM:
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	default:
		dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;
		break;
	}

	/* enable circular DMA for the ACP SRAM to/from I2S channels */
	if (is_circular)
		dma_ctrl |= ACP_DMA_CNTL_0__Circular_DMA_En_MASK;
	else
		dma_ctrl &= ~ACP_DMA_CNTL_0__Circular_DMA_En_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
}

/* Stop a given DMA channel transfer */
static int acp_dma_stop(void __iomem *acp_mmio, u8 ch_num)
{
	u32 dma_ctrl;
	u32 dma_ch_sts;
	u32 count = ACP_DMA_RESET_TIME;

	dma_ctrl = acp_reg_read(acp_mmio, mmACP_DMA_CNTL_0 + ch_num);

	/* clear the dma control register fields before writing zero
	 * in reset bit
	 */
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRun_MASK;
	dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChIOCEn_MASK;

	acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);

	if (dma_ch_sts & BIT(ch_num)) {
		/* set the reset bit for this channel to stop the dma
		 * transfer
		 */
		dma_ctrl |= ACP_DMA_CNTL_0__DMAChRst_MASK;
		acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0 + ch_num);
	}

	/* check the channel status bit for some time and return the status */
	while (true) {
		dma_ch_sts = acp_reg_read(acp_mmio, mmACP_DMA_CH_STS);
		if (!(dma_ch_sts & BIT(ch_num))) {
			/* clear the reset flag after successfully stopping
			 * the dma transfer and break from the loop
			 */
			dma_ctrl &= ~ACP_DMA_CNTL_0__DMAChRst_MASK;

			acp_reg_write(dma_ctrl, acp_mmio, mmACP_DMA_CNTL_0
				      + ch_num);
			break;
		}
		if (--count == 0) {
			pr_err("Failed to stop ACP DMA channel : %d\n", ch_num);
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}
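
/* Power a single ACP SRAM bank on or off. Banks 0-31 are controlled via
 * the MEM_SHUT_DOWN_REQ/STS_LO register pair and banks 32-47 via the _HI
 * pair; the status register is polled until the request takes effect.
 */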
static void acp_set_sram_bank_state(void __iomem *acp_mmio, u16 bank,
				    bool power_on)
{
	u32 val, req_reg, sts_reg, sts_reg_mask;
	u32 loops = 1000;

	if (bank < 32) {
		req_reg = mmACP_MEM_SHUT_DOWN_REQ_LO;
		sts_reg = mmACP_MEM_SHUT_DOWN_STS_LO;
		sts_reg_mask = 0xFFFFFFFF;
	} else {
		bank -= 32;
		req_reg = mmACP_MEM_SHUT_DOWN_REQ_HI;
		sts_reg = mmACP_MEM_SHUT_DOWN_STS_HI;
		sts_reg_mask = 0x0000FFFF;
	}

	val = acp_reg_read(acp_mmio, req_reg);
	if (val & (1 << bank)) {
		/* bank is in off state */
		if (power_on)
			/* request to on */
			val &= ~(1 << bank);
		else
			/* request to off */
			return;
	} else {
		/* bank is in on state */
		if (!power_on)
			/* request to off */
			val |= 1 << bank;
		else
			/* request to on */
			return;
	}
	acp_reg_write(val, acp_mmio, req_reg);

	while (acp_reg_read(acp_mmio, sts_reg) != sts_reg_mask) {
		if (!loops--) {
			pr_err("ACP SRAM bank %d state change failed\n", bank);
			break;
		}
		cpu_relax();
	}
}

/* Initialize and bring ACP hardware to default state. */
static int acp_init(void __iomem *acp_mmio)
{
	u16 bank;
	u32 val, count, sram_pte_offset;

	/* Assert Soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Enable clock to ACP and wait until the clock is enabled */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val = val | ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (val & (u32) 0x1)
			break;
		if (--count == 0) {
			pr_err("Failed to enable clock to ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Deassert the SOFT RESET flags */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
	val &= ~ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	/* initialize Onion control DAGB register */
	acp_reg_write(ACP_ONION_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_ONION_CNTL);

	/* initialize Garlic control DAGB registers */
	acp_reg_write(ACP_GARLIC_CNTL_DEFAULT, acp_mmio,
		      mmACP_AXI2DAGB_GARLIC_CNTL);

	sram_pte_offset = ACP_DAGB_GRP_SRAM_BASE_ADDRESS |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBSnoopSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBTargetMemSel_MASK |
			  ACP_DAGB_BASE_ADDR_GRP_1__AXI2DAGBGrpEnable_MASK;
	acp_reg_write(sram_pte_offset, acp_mmio, mmACP_DAGB_BASE_ADDR_GRP_1);
	acp_reg_write(ACP_PAGE_SIZE_4K_ENABLE, acp_mmio,
		      mmACP_DAGB_PAGE_SIZE_GRP_1);

	acp_reg_write(ACP_SRAM_BASE_ADDRESS, acp_mmio,
		      mmACP_DMA_DESC_BASE_ADDR);

	/* Num of descriptors in SRAM: 0x4 means 256 descriptors (64 * 4) */
	acp_reg_write(0x4, acp_mmio, mmACP_DMA_DESC_MAX_NUM_DSCR);
	acp_reg_write(ACP_EXTERNAL_INTR_CNTL__DMAIOCMask_MASK,
		      acp_mmio, mmACP_EXTERNAL_INTR_CNTL);

	/* When ACP_TILE_P1 is turned on, all SRAM banks get turned on.
	 * Now, turn off all of them. This can't be done in 'poweron' of
	 * ACP pm domain, as this requires ACP to be initialized.
	 */
	for (bank = 1; bank < 48; bank++)
		acp_set_sram_bank_state(acp_mmio, bank, false);

	return 0;
}

/* Deinitialize ACP */
static int acp_deinit(void __iomem *acp_mmio)
{
	u32 val;
	u32 count;

	/* Assert Soft reset of ACP */
	val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);

	val |= ACP_SOFT_RESET__SoftResetAud_MASK;
	acp_reg_write(val, acp_mmio, mmACP_SOFT_RESET);

	count = ACP_SOFT_RESET_DONE_TIME_OUT_VALUE;
	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_SOFT_RESET);
		if (ACP_SOFT_RESET__SoftResetAudDone_MASK ==
		    (val & ACP_SOFT_RESET__SoftResetAudDone_MASK))
			break;
		if (--count == 0) {
			pr_err("Failed to reset ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}

	/* Disable ACP clock and wait until it is turned off */
	val = acp_reg_read(acp_mmio, mmACP_CONTROL);
	val &= ~ACP_CONTROL__ClkEn_MASK;
	acp_reg_write(val, acp_mmio, mmACP_CONTROL);

	count = ACP_CLOCK_EN_TIME_OUT_VALUE;

	while (true) {
		val = acp_reg_read(acp_mmio, mmACP_STATUS);
		if (!(val & (u32) 0x1))
			break;
		if (--count == 0) {
			pr_err("Failed to disable clock to ACP\n");
			return -ETIMEDOUT;
		}
		udelay(100);
	}
	return 0;
}
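
/* On an ACP SRAM->I2S completion, the handler below queues the
 * sysmem->ACP SRAM channel to refill the half of SRAM that was just
 * consumed and reports an elapsed period for playback. On an
 * I2S->ACP SRAM completion it queues the ACP SRAM->sysmem channel to
 * drain the captured half; the capture period is reported once that
 * transfer completes.
 */
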
/* ACP DMA irq handler routine for playback, capture usecases */
static irqreturn_t dma_irq_handler(int irq, void *arg)
{
	u16 dscr_idx;
	u32 intr_flag, ext_intr_status;
	struct audio_drv_data *irq_data;
	void __iomem *acp_mmio;
	struct device *dev = arg;
	bool valid_irq = false;

	irq_data = dev_get_drvdata(dev);
	acp_mmio = irq_data->acp_mmio;

	ext_intr_status = acp_reg_read(acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	intr_flag = (((ext_intr_status &
		       ACP_EXTERNAL_INTR_STAT__DMAIOCStat_MASK) >>
		      ACP_EXTERNAL_INTR_STAT__DMAIOCStat__SHIFT));

	if ((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_13) ==
		    PLAYBACK_START_DMA_DESCR_CH13)
			dscr_idx = PLAYBACK_START_DMA_DESCR_CH12;
		else
			dscr_idx = PLAYBACK_END_DMA_DESCR_CH12;
		config_acp_dma_channel(acp_mmio, SYSRAM_TO_ACP_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		snd_pcm_period_elapsed(irq_data->play_stream);

		acp_reg_write((intr_flag & BIT(ACP_TO_I2S_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) != 0) {
		valid_irq = true;
		if (acp_reg_read(acp_mmio, mmACP_DMA_CUR_DSCR_15) ==
		    CAPTURE_START_DMA_DESCR_CH15)
			dscr_idx = CAPTURE_END_DMA_DESCR_CH14;
		else
			dscr_idx = CAPTURE_START_DMA_DESCR_CH14;
		config_acp_dma_channel(acp_mmio, ACP_TO_SYSRAM_CH_NUM, dscr_idx,
				       1, 0);
		acp_dma_start(acp_mmio, ACP_TO_SYSRAM_CH_NUM, false);

		acp_reg_write((intr_flag & BIT(I2S_TO_ACP_DMA_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if ((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) != 0) {
		valid_irq = true;
		snd_pcm_period_elapsed(irq_data->capture_stream);
		acp_reg_write((intr_flag & BIT(ACP_TO_SYSRAM_CH_NUM)) << 16,
			      acp_mmio, mmACP_EXTERNAL_INTR_STAT);
	}

	if (valid_irq)
		return IRQ_HANDLED;
	else
		return IRQ_NONE;
}
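
/* PCM open: select the hardware constraints for the stream direction,
 * allocate the per-substream data and power on the SRAM banks used by
 * this stream (banks 1-4 for playback, banks 5-8 for capture).
 */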
static int acp_dma_open(struct snd_pcm_substream *substream)
{
	u16 bank;
	int ret = 0;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *intr_data = dev_get_drvdata(prtd->platform->dev);

	struct audio_substream_data *adata =
		kzalloc(sizeof(struct audio_substream_data), GFP_KERNEL);
	if (!adata)
		return -ENOMEM;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
		runtime->hw = acp_pcm_hardware_playback;
	else
		runtime->hw = acp_pcm_hardware_capture;

	ret = snd_pcm_hw_constraint_integer(runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
	if (ret < 0) {
		dev_err(prtd->platform->dev, "set integer constraint failed\n");
		kfree(adata);
		return ret;
	}

	adata->acp_mmio = intr_data->acp_mmio;
	runtime->private_data = adata;

	/* Enable the ACP irq only when neither playback nor capture stream
	 * is active at the time a new stream is opened. It does not need to
	 * be enabled again for a second stream while the first one is still
	 * open.
	 */
	if (!intr_data->play_stream && !intr_data->capture_stream)
		acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		intr_data->play_stream = substream;
		for (bank = 1; bank <= 4; bank++)
			acp_set_sram_bank_state(intr_data->acp_mmio, bank,
						true);
	} else {
		intr_data->capture_stream = substream;
		for (bank = 5; bank <= 8; bank++)
			acp_set_sram_bank_state(intr_data->acp_mmio, bank,
						true);
	}

	return 0;
}

static int acp_dma_hw_params(struct snd_pcm_substream *substream,
			     struct snd_pcm_hw_params *params)
{
	int status;
	uint64_t size;
	struct page *pg;
	struct snd_pcm_runtime *runtime;
	struct audio_substream_data *rtd;

	runtime = substream->runtime;
	rtd = runtime->private_data;

	if (WARN_ON(!rtd))
		return -EINVAL;

	size = params_buffer_bytes(params);
	status = snd_pcm_lib_malloc_pages(substream, size);
	if (status < 0)
		return status;

	memset(substream->runtime->dma_area, 0, params_buffer_bytes(params));
	pg = virt_to_page(substream->dma_buffer.area);

	if (pg) {
		acp_set_sram_bank_state(rtd->acp_mmio, 0, true);
		/* Save for runtime private data */
		rtd->pg = pg;
		rtd->order = get_order(size);

		/* Fill the page table entries in ACP SRAM */
		rtd->size = size;
		rtd->num_of_pages = PAGE_ALIGN(size) >> PAGE_SHIFT;
		rtd->direction = substream->stream;

		config_acp_dma(rtd->acp_mmio, rtd);
		status = 0;
	} else {
		status = -ENOMEM;
	}
	return status;
}

static int acp_dma_hw_free(struct snd_pcm_substream *substream)
{
	return snd_pcm_lib_free_pages(substream);
}
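
/* Report the current buffer position. It is derived from the descriptor
 * the DMA engine is currently processing, so it advances at period
 * granularity rather than per sample.
 */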
static snd_pcm_uframes_t acp_dma_pointer(struct snd_pcm_substream *substream)
{
	u16 dscr;
	u32 mul, dma_config, period_bytes;
	u32 pos = 0;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	period_bytes = frames_to_bytes(runtime, runtime->period_size);
	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		dscr = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CUR_DSCR_13);

		if (dscr == PLAYBACK_START_DMA_DESCR_CH13)
			mul = 0;
		else
			mul = 1;
		pos = (mul * period_bytes);
	} else {
		dma_config = acp_reg_read(rtd->acp_mmio, mmACP_DMA_CNTL_14);
		if (dma_config != 0) {
			dscr = acp_reg_read(rtd->acp_mmio,
					    mmACP_DMA_CUR_DSCR_14);
			if (dscr == CAPTURE_START_DMA_DESCR_CH14)
				mul = 1;
			else
				mul = 2;
			pos = (mul * period_bytes);
		}

		if (pos >= (2 * period_bytes))
			pos = 0;
	}
	return bytes_to_frames(runtime, pos);
}

static int acp_dma_mmap(struct snd_pcm_substream *substream,
			struct vm_area_struct *vma)
{
	return snd_pcm_lib_default_mmap(substream, vma);
}

static int acp_dma_prepare(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_I2S_DMA_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH13,
				       NUM_DSCRS_PER_CHANNEL, 0);
		/* Fill ACP SRAM (2 periods) with zeros from System RAM,
		 * which is zeroed in hw_params
		 */
		acp_dma_start(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM, false);

		/* ACP SRAM (2 periods of buffer size) is initially filled
		 * with zeros. Before rendering starts, the 2nd half of SRAM
		 * will be filled with valid audio data DMA'ed from the first
		 * half of system RAM, while the 1st half of SRAM still holds
		 * zeros. This is the initial scenario when rendering starts
		 * from SRAM. Later on, the 2nd half of system memory is
		 * DMA'ed to the 1st half of SRAM and the 1st half of system
		 * memory to the 2nd half of SRAM, in a ping-pong fashion,
		 * until rendering stops.
		 */
		config_acp_dma_channel(rtd->acp_mmio, SYSRAM_TO_ACP_CH_NUM,
				       PLAYBACK_START_DMA_DESCR_CH12,
				       1, 0);
	} else {
		config_acp_dma_channel(rtd->acp_mmio, ACP_TO_SYSRAM_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH14,
				       NUM_DSCRS_PER_CHANNEL, 0);
		config_acp_dma_channel(rtd->acp_mmio, I2S_TO_ACP_DMA_CH_NUM,
				       CAPTURE_START_DMA_DESCR_CH15,
				       NUM_DSCRS_PER_CHANNEL, 0);
	}
	return 0;
}
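
/* Start/stop the DMA pipelines. For playback, the sysmem->ACP SRAM
 * channel is run once to pre-fill SRAM and must complete before the
 * circular ACP SRAM->I2S channel is started. For capture, only the
 * circular I2S->ACP SRAM channel is started here; the drain to system
 * memory is kicked off from the interrupt handler.
 */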
static int acp_dma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	int ret;
	u32 loops = 1000;

	struct snd_pcm_runtime *runtime = substream->runtime;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_substream_data *rtd = runtime->private_data;

	if (!rtd)
		return -EINVAL;
	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
	case SNDRV_PCM_TRIGGER_RESUME:
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
			acp_dma_start(rtd->acp_mmio,
				      SYSRAM_TO_ACP_CH_NUM, false);
			while (acp_reg_read(rtd->acp_mmio, mmACP_DMA_CH_STS) &
			       BIT(SYSRAM_TO_ACP_CH_NUM)) {
				if (!loops--) {
					dev_err(prtd->platform->dev,
						"acp dma start timeout\n");
					return -ETIMEDOUT;
				}
				cpu_relax();
			}

			acp_dma_start(rtd->acp_mmio,
				      ACP_TO_I2S_DMA_CH_NUM, true);
		} else {
			acp_dma_start(rtd->acp_mmio,
				      I2S_TO_ACP_DMA_CH_NUM, true);
		}
		ret = 0;
		break;
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
	case SNDRV_PCM_TRIGGER_SUSPEND:
		/* Need to stop only the circular DMA channels:
		 * ACP_TO_I2S_DMA_CH_NUM / I2S_TO_ACP_DMA_CH_NUM. The
		 * non-circular channels (SYSRAM_TO_ACP_CH_NUM /
		 * ACP_TO_SYSRAM_CH_NUM) stop automatically once their
		 * transfers complete.
		 */
		if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK)
			ret = acp_dma_stop(rtd->acp_mmio,
					   ACP_TO_I2S_DMA_CH_NUM);
		else
			ret = acp_dma_stop(rtd->acp_mmio,
					   I2S_TO_ACP_DMA_CH_NUM);
		break;
	default:
		ret = -EINVAL;
	}
	return ret;
}

static int acp_dma_new(struct snd_soc_pcm_runtime *rtd)
{
	return snd_pcm_lib_preallocate_pages_for_all(rtd->pcm,
						     SNDRV_DMA_TYPE_DEV,
						     NULL, MIN_BUFFER,
						     MAX_BUFFER);
}

static int acp_dma_close(struct snd_pcm_substream *substream)
{
	u16 bank;
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct audio_substream_data *rtd = runtime->private_data;
	struct snd_soc_pcm_runtime *prtd = substream->private_data;
	struct audio_drv_data *adata = dev_get_drvdata(prtd->platform->dev);

	kfree(rtd);

	if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK) {
		adata->play_stream = NULL;
		for (bank = 1; bank <= 4; bank++)
			acp_set_sram_bank_state(adata->acp_mmio, bank,
						false);
	} else {
		adata->capture_stream = NULL;
		for (bank = 5; bank <= 8; bank++)
			acp_set_sram_bank_state(adata->acp_mmio, bank,
						false);
	}

	/* Disable the ACP irq when the current stream is being closed and
	 * no other stream is active.
	 */
	if (!adata->play_stream && !adata->capture_stream)
		acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);

	return 0;
}

static const struct snd_pcm_ops acp_dma_ops = {
	.open = acp_dma_open,
	.close = acp_dma_close,
	.ioctl = snd_pcm_lib_ioctl,
	.hw_params = acp_dma_hw_params,
	.hw_free = acp_dma_hw_free,
	.trigger = acp_dma_trigger,
	.pointer = acp_dma_pointer,
	.mmap = acp_dma_mmap,
	.prepare = acp_dma_prepare,
};

static struct snd_soc_platform_driver acp_asoc_platform = {
	.ops = &acp_dma_ops,
	.pcm_new = acp_dma_new,
};
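
/* Platform device glue: map the ACP MMIO region, install the DMA
 * interrupt handler, bring the ACP to its default state and register the
 * ASoC platform. Runtime PM is set up to autosuspend after 10 seconds of
 * inactivity.
 */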
static int acp_audio_probe(struct platform_device *pdev)
{
	int status;
	struct audio_drv_data *audio_drv_data;
	struct resource *res;

	audio_drv_data = devm_kzalloc(&pdev->dev, sizeof(struct audio_drv_data),
				      GFP_KERNEL);
	if (!audio_drv_data)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	audio_drv_data->acp_mmio = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(audio_drv_data->acp_mmio))
		return PTR_ERR(audio_drv_data->acp_mmio);

	/* The following members get populated in the device 'open'
	 * function. Until then, interrupts are kept disabled in 'acp_init'
	 * and the device does not generate any interrupts.
	 */
	audio_drv_data->play_stream = NULL;
	audio_drv_data->capture_stream = NULL;

	res = platform_get_resource(pdev, IORESOURCE_IRQ, 0);
	if (!res) {
		dev_err(&pdev->dev, "IORESOURCE_IRQ FAILED\n");
		return -ENODEV;
	}

	status = devm_request_irq(&pdev->dev, res->start, dma_irq_handler,
				  0, "ACP_IRQ", &pdev->dev);
	if (status) {
		dev_err(&pdev->dev, "ACP IRQ request failed\n");
		return status;
	}

	dev_set_drvdata(&pdev->dev, audio_drv_data);

	/* Initialize the ACP */
	acp_init(audio_drv_data->acp_mmio);

	status = snd_soc_register_platform(&pdev->dev, &acp_asoc_platform);
	if (status != 0) {
		dev_err(&pdev->dev, "Failed to register ALSA platform device\n");
		return status;
	}

	pm_runtime_set_autosuspend_delay(&pdev->dev, 10000);
	pm_runtime_use_autosuspend(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	return status;
}

static int acp_audio_remove(struct platform_device *pdev)
{
	struct audio_drv_data *adata = dev_get_drvdata(&pdev->dev);

	acp_deinit(adata->acp_mmio);
	snd_soc_unregister_platform(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return 0;
}

static int acp_pcm_resume(struct device *dev)
{
	u16 bank;
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_init(adata->acp_mmio);

	if (adata->play_stream && adata->play_stream->runtime) {
		for (bank = 1; bank <= 4; bank++)
			acp_set_sram_bank_state(adata->acp_mmio, bank,
						true);
		config_acp_dma(adata->acp_mmio,
			       adata->play_stream->runtime->private_data);
	}
	if (adata->capture_stream && adata->capture_stream->runtime) {
		for (bank = 5; bank <= 8; bank++)
			acp_set_sram_bank_state(adata->acp_mmio, bank,
						true);
		config_acp_dma(adata->acp_mmio,
			       adata->capture_stream->runtime->private_data);
	}
	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static int acp_pcm_runtime_suspend(struct device *dev)
{
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_deinit(adata->acp_mmio);
	acp_reg_write(0, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static int acp_pcm_runtime_resume(struct device *dev)
{
	struct audio_drv_data *adata = dev_get_drvdata(dev);

	acp_init(adata->acp_mmio);
	acp_reg_write(1, adata->acp_mmio, mmACP_EXTERNAL_INTR_ENB);
	return 0;
}

static const struct dev_pm_ops acp_pm_ops = {
	.resume = acp_pcm_resume,
	.runtime_suspend = acp_pcm_runtime_suspend,
	.runtime_resume = acp_pcm_runtime_resume,
};

static struct platform_driver acp_dma_driver = {
	.probe = acp_audio_probe,
	.remove = acp_audio_remove,
	.driver = {
		.name = "acp_audio_dma",
		.pm = &acp_pm_ops,
	},
};

module_platform_driver(acp_dma_driver);

MODULE_AUTHOR("Maruthi.Bayyavarapu@amd.com");
MODULE_DESCRIPTION("AMD ACP PCM Driver");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:acp-dma-audio");