1 // SPDX-License-Identifier: GPL-2.0 2 // 3 // Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver 4 // 5 // Copyright (C) 2021 Renesas Electronics Corp. 6 // Copyright (C) 2019 Chris Brandt. 7 // 8 9 #include <linux/clk.h> 10 #include <linux/dmaengine.h> 11 #include <linux/io.h> 12 #include <linux/module.h> 13 #include <linux/pm_runtime.h> 14 #include <linux/reset.h> 15 #include <sound/soc.h> 16 17 /* REGISTER OFFSET */ 18 #define SSICR 0x000 19 #define SSISR 0x004 20 #define SSIFCR 0x010 21 #define SSIFSR 0x014 22 #define SSIFTDR 0x018 23 #define SSIFRDR 0x01c 24 #define SSIOFR 0x020 25 #define SSISCR 0x024 26 27 /* SSI REGISTER BITS */ 28 #define SSICR_DWL(x) (((x) & 0x7) << 19) 29 #define SSICR_SWL(x) (((x) & 0x7) << 16) 30 31 #define SSICR_CKS BIT(30) 32 #define SSICR_TUIEN BIT(29) 33 #define SSICR_TOIEN BIT(28) 34 #define SSICR_RUIEN BIT(27) 35 #define SSICR_ROIEN BIT(26) 36 #define SSICR_MST BIT(14) 37 #define SSICR_BCKP BIT(13) 38 #define SSICR_LRCKP BIT(12) 39 #define SSICR_CKDV(x) (((x) & 0xf) << 4) 40 #define SSICR_TEN BIT(1) 41 #define SSICR_REN BIT(0) 42 43 #define SSISR_TUIRQ BIT(29) 44 #define SSISR_TOIRQ BIT(28) 45 #define SSISR_RUIRQ BIT(27) 46 #define SSISR_ROIRQ BIT(26) 47 #define SSISR_IIRQ BIT(25) 48 49 #define SSIFCR_AUCKE BIT(31) 50 #define SSIFCR_SSIRST BIT(16) 51 #define SSIFCR_TIE BIT(3) 52 #define SSIFCR_RIE BIT(2) 53 #define SSIFCR_TFRST BIT(1) 54 #define SSIFCR_RFRST BIT(0) 55 #define SSIFCR_FIFO_RST (SSIFCR_TFRST | SSIFCR_RFRST) 56 57 #define SSIFSR_TDC_MASK 0x3f 58 #define SSIFSR_TDC_SHIFT 24 59 #define SSIFSR_RDC_MASK 0x3f 60 #define SSIFSR_RDC_SHIFT 8 61 62 #define SSIFSR_TDE BIT(16) 63 #define SSIFSR_RDF BIT(0) 64 65 #define SSIOFR_LRCONT BIT(8) 66 67 #define SSISCR_TDES(x) (((x) & 0x1f) << 8) 68 #define SSISCR_RDFS(x) (((x) & 0x1f) << 0) 69 70 /* Pre allocated buffers sizes */ 71 #define PREALLOC_BUFFER (SZ_32K) 72 #define PREALLOC_BUFFER_MAX (SZ_32K) 73 74 #define SSI_RATES SNDRV_PCM_RATE_8000_48000 /* 8k-44.1kHz */ 75 #define SSI_FMTS SNDRV_PCM_FMTBIT_S16_LE 76 #define SSI_CHAN_MIN 2 77 #define SSI_CHAN_MAX 2 78 #define SSI_FIFO_DEPTH 32 79 80 struct rz_ssi_priv; 81 82 struct rz_ssi_stream { 83 struct rz_ssi_priv *priv; 84 struct snd_pcm_substream *substream; 85 int fifo_sample_size; /* sample capacity of SSI FIFO */ 86 int dma_buffer_pos; /* The address for the next DMA descriptor */ 87 int period_counter; /* for keeping track of periods transferred */ 88 int sample_width; 89 int buffer_pos; /* current frame position in the buffer */ 90 int running; /* 0=stopped, 1=running */ 91 92 int uerr_num; 93 int oerr_num; 94 95 struct dma_chan *dma_ch; 96 97 int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm); 98 }; 99 100 struct rz_ssi_priv { 101 void __iomem *base; 102 struct platform_device *pdev; 103 struct reset_control *rstc; 104 struct device *dev; 105 struct clk *sfr_clk; 106 struct clk *clk; 107 108 phys_addr_t phys; 109 int irq_int; 110 int irq_tx; 111 int irq_rx; 112 int irq_rt; 113 114 spinlock_t lock; 115 116 /* 117 * The SSI supports full-duplex transmission and reception. 118 * However, if an error occurs, channel reset (both transmission 119 * and reception reset) is required. 120 * So it is better to use as half-duplex (playing and recording 121 * should be done on separate channels). 122 */ 123 struct rz_ssi_stream playback; 124 struct rz_ssi_stream capture; 125 126 /* clock */ 127 unsigned long audio_mck; 128 unsigned long audio_clk_1; 129 unsigned long audio_clk_2; 130 131 bool lrckp_fsync_fall; /* LR clock polarity (SSICR.LRCKP) */ 132 bool bckp_rise; /* Bit clock polarity (SSICR.BCKP) */ 133 bool dma_rt; 134 135 /* Full duplex communication support */ 136 struct { 137 unsigned int rate; 138 unsigned int channels; 139 unsigned int sample_width; 140 unsigned int sample_bits; 141 } hw_params_cache; 142 }; 143 144 static void rz_ssi_dma_complete(void *data); 145 146 static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data) 147 { 148 writel(data, (priv->base + reg)); 149 } 150 151 static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg) 152 { 153 return readl(priv->base + reg); 154 } 155 156 static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg, 157 u32 bclr, u32 bset) 158 { 159 u32 val; 160 161 val = readl(priv->base + reg); 162 val = (val & ~bclr) | bset; 163 writel(val, (priv->base + reg)); 164 } 165 166 static inline struct snd_soc_dai * 167 rz_ssi_get_dai(struct snd_pcm_substream *substream) 168 { 169 struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream); 170 171 return snd_soc_rtd_to_cpu(rtd, 0); 172 } 173 174 static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi, 175 struct snd_pcm_substream *substream) 176 { 177 return substream->stream == SNDRV_PCM_STREAM_PLAYBACK; 178 } 179 180 static inline struct rz_ssi_stream * 181 rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream) 182 { 183 struct rz_ssi_stream *stream = &ssi->playback; 184 185 if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK) 186 stream = &ssi->capture; 187 188 return stream; 189 } 190 191 static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi) 192 { 193 return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch)); 194 } 195 196 static void rz_ssi_set_substream(struct rz_ssi_stream *strm, 197 struct snd_pcm_substream *substream) 198 { 199 struct rz_ssi_priv *ssi = strm->priv; 200 unsigned long flags; 201 202 spin_lock_irqsave(&ssi->lock, flags); 203 strm->substream = substream; 204 spin_unlock_irqrestore(&ssi->lock, flags); 205 } 206 207 static bool rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi, 208 struct rz_ssi_stream *strm) 209 { 210 unsigned long flags; 211 bool ret; 212 213 spin_lock_irqsave(&ssi->lock, flags); 214 ret = strm->substream && strm->substream->runtime; 215 spin_unlock_irqrestore(&ssi->lock, flags); 216 217 return ret; 218 } 219 220 static inline bool rz_ssi_is_stream_running(struct rz_ssi_stream *strm) 221 { 222 return strm->substream && strm->running; 223 } 224 225 static void rz_ssi_stream_init(struct rz_ssi_stream *strm, 226 struct snd_pcm_substream *substream) 227 { 228 struct snd_pcm_runtime *runtime = substream->runtime; 229 230 rz_ssi_set_substream(strm, substream); 231 strm->sample_width = samples_to_bytes(runtime, 1); 232 strm->dma_buffer_pos = 0; 233 strm->period_counter = 0; 234 strm->buffer_pos = 0; 235 236 strm->oerr_num = 0; 237 strm->uerr_num = 0; 238 strm->running = 0; 239 240 /* fifo init */ 241 strm->fifo_sample_size = SSI_FIFO_DEPTH; 242 } 243 244 static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi, 245 struct rz_ssi_stream *strm) 246 { 247 struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream); 248 249 rz_ssi_set_substream(strm, NULL); 250 251 if (strm->oerr_num > 0) 252 dev_info(dai->dev, "overrun = %d\n", strm->oerr_num); 253 254 if (strm->uerr_num > 0) 255 dev_info(dai->dev, "underrun = %d\n", strm->uerr_num); 256 } 257 258 static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate, 259 unsigned int channels) 260 { 261 static s8 ckdv[16] = { 1, 2, 4, 8, 16, 32, 64, 128, 262 6, 12, 24, 48, 96, -1, -1, -1 }; 263 unsigned int channel_bits = 32; /* System Word Length */ 264 unsigned long bclk_rate = rate * channels * channel_bits; 265 unsigned int div; 266 unsigned int i; 267 u32 ssicr = 0; 268 u32 clk_ckdv; 269 270 /* Clear AUCKE so we can set MST */ 271 rz_ssi_reg_writel(ssi, SSIFCR, 0); 272 273 /* Continue to output LRCK pin even when idle */ 274 rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT); 275 if (ssi->audio_clk_1 && ssi->audio_clk_2) { 276 if (ssi->audio_clk_1 % bclk_rate) 277 ssi->audio_mck = ssi->audio_clk_2; 278 else 279 ssi->audio_mck = ssi->audio_clk_1; 280 } 281 282 /* Clock setting */ 283 ssicr |= SSICR_MST; 284 if (ssi->audio_mck == ssi->audio_clk_1) 285 ssicr |= SSICR_CKS; 286 if (ssi->bckp_rise) 287 ssicr |= SSICR_BCKP; 288 if (ssi->lrckp_fsync_fall) 289 ssicr |= SSICR_LRCKP; 290 291 /* Determine the clock divider */ 292 clk_ckdv = 0; 293 div = ssi->audio_mck / bclk_rate; 294 /* try to find an match */ 295 for (i = 0; i < ARRAY_SIZE(ckdv); i++) { 296 if (ckdv[i] == div) { 297 clk_ckdv = i; 298 break; 299 } 300 } 301 302 if (i == ARRAY_SIZE(ckdv)) { 303 dev_err(ssi->dev, "Rate not divisible by audio clock source\n"); 304 return -EINVAL; 305 } 306 307 /* 308 * DWL: Data Word Length = 16 bits 309 * SWL: System Word Length = 32 bits 310 */ 311 ssicr |= SSICR_CKDV(clk_ckdv); 312 ssicr |= SSICR_DWL(1) | SSICR_SWL(3); 313 rz_ssi_reg_writel(ssi, SSICR, ssicr); 314 rz_ssi_reg_writel(ssi, SSIFCR, SSIFCR_AUCKE | SSIFCR_FIFO_RST); 315 316 return 0; 317 } 318 319 static void rz_ssi_set_idle(struct rz_ssi_priv *ssi) 320 { 321 int timeout; 322 323 /* Disable irqs */ 324 rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN | 325 SSICR_RUIEN | SSICR_ROIEN, 0); 326 rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0); 327 328 /* Clear all error flags */ 329 rz_ssi_reg_mask_setl(ssi, SSISR, 330 (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ | 331 SSISR_RUIRQ), 0); 332 333 /* Wait for idle */ 334 timeout = 100; 335 while (--timeout) { 336 if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ) 337 break; 338 udelay(1); 339 } 340 341 if (!timeout) 342 dev_info(ssi->dev, "timeout waiting for SSI idle\n"); 343 344 /* Hold FIFOs in reset */ 345 rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_FIFO_RST); 346 } 347 348 static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm) 349 { 350 bool is_play = rz_ssi_stream_is_play(ssi, strm->substream); 351 bool is_full_duplex; 352 u32 ssicr, ssifcr; 353 354 is_full_duplex = rz_ssi_is_stream_running(&ssi->playback) || 355 rz_ssi_is_stream_running(&ssi->capture); 356 ssicr = rz_ssi_reg_readl(ssi, SSICR); 357 ssifcr = rz_ssi_reg_readl(ssi, SSIFCR); 358 if (!is_full_duplex) { 359 ssifcr &= ~0xF; 360 } else { 361 rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0); 362 rz_ssi_set_idle(ssi); 363 ssifcr &= ~SSIFCR_FIFO_RST; 364 } 365 366 /* FIFO interrupt thresholds */ 367 if (rz_ssi_is_dma_enabled(ssi)) 368 rz_ssi_reg_writel(ssi, SSISCR, 0); 369 else 370 rz_ssi_reg_writel(ssi, SSISCR, 371 SSISCR_TDES(strm->fifo_sample_size / 2 - 1) | 372 SSISCR_RDFS(0)); 373 374 /* enable IRQ */ 375 if (is_play) { 376 ssicr |= SSICR_TUIEN | SSICR_TOIEN; 377 ssifcr |= SSIFCR_TIE; 378 if (!is_full_duplex) 379 ssifcr |= SSIFCR_RFRST; 380 } else { 381 ssicr |= SSICR_RUIEN | SSICR_ROIEN; 382 ssifcr |= SSIFCR_RIE; 383 if (!is_full_duplex) 384 ssifcr |= SSIFCR_TFRST; 385 } 386 387 rz_ssi_reg_writel(ssi, SSICR, ssicr); 388 rz_ssi_reg_writel(ssi, SSIFCR, ssifcr); 389 390 /* Clear all error flags */ 391 rz_ssi_reg_mask_setl(ssi, SSISR, 392 (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ | 393 SSISR_RUIRQ), 0); 394 395 strm->running = 1; 396 if (is_full_duplex) 397 ssicr |= SSICR_TEN | SSICR_REN; 398 else 399 ssicr |= is_play ? SSICR_TEN : SSICR_REN; 400 401 rz_ssi_reg_writel(ssi, SSICR, ssicr); 402 403 return 0; 404 } 405 406 static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm) 407 { 408 strm->running = 0; 409 410 if (rz_ssi_is_stream_running(&ssi->playback) || 411 rz_ssi_is_stream_running(&ssi->capture)) 412 return 0; 413 414 /* Disable TX/RX */ 415 rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0); 416 417 /* Cancel all remaining DMA transactions */ 418 if (rz_ssi_is_dma_enabled(ssi)) { 419 if (ssi->playback.dma_ch) 420 dmaengine_terminate_async(ssi->playback.dma_ch); 421 if (ssi->capture.dma_ch) 422 dmaengine_terminate_async(ssi->capture.dma_ch); 423 } 424 425 rz_ssi_set_idle(ssi); 426 427 return 0; 428 } 429 430 static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames) 431 { 432 struct snd_pcm_substream *substream = strm->substream; 433 struct snd_pcm_runtime *runtime; 434 int current_period; 435 436 if (!strm->running || !substream || !substream->runtime) 437 return; 438 439 runtime = substream->runtime; 440 strm->buffer_pos += frames; 441 WARN_ON(strm->buffer_pos > runtime->buffer_size); 442 443 /* ring buffer */ 444 if (strm->buffer_pos == runtime->buffer_size) 445 strm->buffer_pos = 0; 446 447 current_period = strm->buffer_pos / runtime->period_size; 448 if (strm->period_counter != current_period) { 449 snd_pcm_period_elapsed(strm->substream); 450 strm->period_counter = current_period; 451 } 452 } 453 454 static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm) 455 { 456 struct snd_pcm_substream *substream = strm->substream; 457 struct snd_pcm_runtime *runtime; 458 u16 *buf; 459 int fifo_samples; 460 int frames_left; 461 int samples; 462 int i; 463 464 if (!rz_ssi_stream_is_valid(ssi, strm)) 465 return -EINVAL; 466 467 runtime = substream->runtime; 468 469 do { 470 /* frames left in this period */ 471 frames_left = runtime->period_size - 472 (strm->buffer_pos % runtime->period_size); 473 if (!frames_left) 474 frames_left = runtime->period_size; 475 476 /* Samples in RX FIFO */ 477 fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >> 478 SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK; 479 480 /* Only read full frames at a time */ 481 samples = 0; 482 while (frames_left && (fifo_samples >= runtime->channels)) { 483 samples += runtime->channels; 484 fifo_samples -= runtime->channels; 485 frames_left--; 486 } 487 488 /* not enough samples yet */ 489 if (!samples) 490 break; 491 492 /* calculate new buffer index */ 493 buf = (u16 *)runtime->dma_area; 494 buf += strm->buffer_pos * runtime->channels; 495 496 /* Note, only supports 16-bit samples */ 497 for (i = 0; i < samples; i++) 498 *buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16); 499 500 rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0); 501 rz_ssi_pointer_update(strm, samples / runtime->channels); 502 } while (!frames_left && fifo_samples >= runtime->channels); 503 504 return 0; 505 } 506 507 static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm) 508 { 509 struct snd_pcm_substream *substream = strm->substream; 510 struct snd_pcm_runtime *runtime = substream->runtime; 511 int sample_space; 512 int samples = 0; 513 int frames_left; 514 int i; 515 u32 ssifsr; 516 u16 *buf; 517 518 if (!rz_ssi_stream_is_valid(ssi, strm)) 519 return -EINVAL; 520 521 /* frames left in this period */ 522 frames_left = runtime->period_size - (strm->buffer_pos % 523 runtime->period_size); 524 if (frames_left == 0) 525 frames_left = runtime->period_size; 526 527 sample_space = strm->fifo_sample_size; 528 ssifsr = rz_ssi_reg_readl(ssi, SSIFSR); 529 sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK; 530 531 /* Only add full frames at a time */ 532 while (frames_left && (sample_space >= runtime->channels)) { 533 samples += runtime->channels; 534 sample_space -= runtime->channels; 535 frames_left--; 536 } 537 538 /* no space to send anything right now */ 539 if (samples == 0) 540 return 0; 541 542 /* calculate new buffer index */ 543 buf = (u16 *)(runtime->dma_area); 544 buf += strm->buffer_pos * runtime->channels; 545 546 /* Note, only supports 16-bit samples */ 547 for (i = 0; i < samples; i++) 548 rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16)); 549 550 rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0); 551 rz_ssi_pointer_update(strm, samples / runtime->channels); 552 553 return 0; 554 } 555 556 static irqreturn_t rz_ssi_interrupt(int irq, void *data) 557 { 558 struct rz_ssi_stream *strm_playback = NULL; 559 struct rz_ssi_stream *strm_capture = NULL; 560 struct rz_ssi_priv *ssi = data; 561 u32 ssisr = rz_ssi_reg_readl(ssi, SSISR); 562 563 if (ssi->playback.substream) 564 strm_playback = &ssi->playback; 565 if (ssi->capture.substream) 566 strm_capture = &ssi->capture; 567 568 if (!strm_playback && !strm_capture) 569 return IRQ_HANDLED; /* Left over TX/RX interrupt */ 570 571 if (irq == ssi->irq_int) { /* error or idle */ 572 bool is_stopped = false; 573 int i, count; 574 575 if (rz_ssi_is_dma_enabled(ssi)) 576 count = 4; 577 else 578 count = 1; 579 580 if (ssisr & (SSISR_RUIRQ | SSISR_ROIRQ | SSISR_TUIRQ | SSISR_TOIRQ)) 581 is_stopped = true; 582 583 if (ssi->capture.substream && is_stopped) { 584 if (ssisr & SSISR_RUIRQ) 585 strm_capture->uerr_num++; 586 if (ssisr & SSISR_ROIRQ) 587 strm_capture->oerr_num++; 588 589 rz_ssi_stop(ssi, strm_capture); 590 } 591 592 if (ssi->playback.substream && is_stopped) { 593 if (ssisr & SSISR_TUIRQ) 594 strm_playback->uerr_num++; 595 if (ssisr & SSISR_TOIRQ) 596 strm_playback->oerr_num++; 597 598 rz_ssi_stop(ssi, strm_playback); 599 } 600 601 /* Clear all flags */ 602 rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ | SSISR_TUIRQ | 603 SSISR_ROIRQ | SSISR_RUIRQ, 0); 604 605 /* Add/remove more data */ 606 if (ssi->capture.substream && is_stopped) { 607 for (i = 0; i < count; i++) 608 strm_capture->transfer(ssi, strm_capture); 609 } 610 611 if (ssi->playback.substream && is_stopped) { 612 for (i = 0; i < count; i++) 613 strm_playback->transfer(ssi, strm_playback); 614 } 615 616 /* Resume */ 617 if (ssi->playback.substream && is_stopped) 618 rz_ssi_start(ssi, &ssi->playback); 619 if (ssi->capture.substream && is_stopped) 620 rz_ssi_start(ssi, &ssi->capture); 621 } 622 623 if (!rz_ssi_is_stream_running(&ssi->playback) && 624 !rz_ssi_is_stream_running(&ssi->capture)) 625 return IRQ_HANDLED; 626 627 /* tx data empty */ 628 if (irq == ssi->irq_tx && rz_ssi_is_stream_running(&ssi->playback)) 629 strm_playback->transfer(ssi, &ssi->playback); 630 631 /* rx data full */ 632 if (irq == ssi->irq_rx && rz_ssi_is_stream_running(&ssi->capture)) { 633 strm_capture->transfer(ssi, &ssi->capture); 634 rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0); 635 } 636 637 if (irq == ssi->irq_rt) { 638 if (ssi->playback.substream) { 639 strm_playback->transfer(ssi, &ssi->playback); 640 } else { 641 strm_capture->transfer(ssi, &ssi->capture); 642 rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0); 643 } 644 } 645 646 return IRQ_HANDLED; 647 } 648 649 static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi, 650 struct dma_chan *dma_ch, bool is_play) 651 { 652 struct dma_slave_config cfg; 653 654 memset(&cfg, 0, sizeof(cfg)); 655 656 cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM; 657 cfg.dst_addr = ssi->phys + SSIFTDR; 658 cfg.src_addr = ssi->phys + SSIFRDR; 659 cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES; 660 cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES; 661 662 return dmaengine_slave_config(dma_ch, &cfg); 663 } 664 665 static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi, 666 struct rz_ssi_stream *strm) 667 { 668 struct snd_pcm_substream *substream = strm->substream; 669 struct dma_async_tx_descriptor *desc; 670 struct snd_pcm_runtime *runtime; 671 enum dma_transfer_direction dir; 672 u32 dma_paddr, dma_size; 673 int amount; 674 675 if (!rz_ssi_stream_is_valid(ssi, strm)) 676 return -EINVAL; 677 678 runtime = substream->runtime; 679 if (runtime->state == SNDRV_PCM_STATE_DRAINING) 680 /* 681 * Stream is ending, so do not queue up any more DMA 682 * transfers otherwise we play partial sound clips 683 * because we can't shut off the DMA quick enough. 684 */ 685 return 0; 686 687 dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM; 688 689 /* Always transfer 1 period */ 690 amount = runtime->period_size; 691 692 /* DMA physical address and size */ 693 dma_paddr = runtime->dma_addr + frames_to_bytes(runtime, 694 strm->dma_buffer_pos); 695 dma_size = frames_to_bytes(runtime, amount); 696 desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size, 697 dir, 698 DMA_PREP_INTERRUPT | DMA_CTRL_ACK); 699 if (!desc) { 700 dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n"); 701 return -ENOMEM; 702 } 703 704 desc->callback = rz_ssi_dma_complete; 705 desc->callback_param = strm; 706 707 if (dmaengine_submit(desc) < 0) { 708 dev_err(ssi->dev, "dmaengine_submit() fail\n"); 709 return -EIO; 710 } 711 712 /* Update DMA pointer */ 713 strm->dma_buffer_pos += amount; 714 if (strm->dma_buffer_pos >= runtime->buffer_size) 715 strm->dma_buffer_pos = 0; 716 717 /* Start DMA */ 718 dma_async_issue_pending(strm->dma_ch); 719 720 return 0; 721 } 722 723 static void rz_ssi_dma_complete(void *data) 724 { 725 struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data; 726 727 if (!strm->running || !strm->substream || !strm->substream->runtime) 728 return; 729 730 /* Note that next DMA transaction has probably already started */ 731 rz_ssi_pointer_update(strm, strm->substream->runtime->period_size); 732 733 /* Queue up another DMA transaction */ 734 rz_ssi_dma_transfer(strm->priv, strm); 735 } 736 737 static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi) 738 { 739 if (ssi->playback.dma_ch) { 740 dma_release_channel(ssi->playback.dma_ch); 741 ssi->playback.dma_ch = NULL; 742 if (ssi->dma_rt) 743 ssi->dma_rt = false; 744 } 745 746 if (ssi->capture.dma_ch) { 747 dma_release_channel(ssi->capture.dma_ch); 748 ssi->capture.dma_ch = NULL; 749 } 750 } 751 752 static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev) 753 { 754 ssi->playback.dma_ch = dma_request_chan(dev, "tx"); 755 if (IS_ERR(ssi->playback.dma_ch)) 756 ssi->playback.dma_ch = NULL; 757 758 ssi->capture.dma_ch = dma_request_chan(dev, "rx"); 759 if (IS_ERR(ssi->capture.dma_ch)) 760 ssi->capture.dma_ch = NULL; 761 762 if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) { 763 ssi->playback.dma_ch = dma_request_chan(dev, "rt"); 764 if (IS_ERR(ssi->playback.dma_ch)) { 765 ssi->playback.dma_ch = NULL; 766 goto no_dma; 767 } 768 769 ssi->dma_rt = true; 770 } 771 772 if (!rz_ssi_is_dma_enabled(ssi)) 773 goto no_dma; 774 775 if (ssi->playback.dma_ch && 776 (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0)) 777 goto no_dma; 778 779 if (ssi->capture.dma_ch && 780 (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0)) 781 goto no_dma; 782 783 return 0; 784 785 no_dma: 786 rz_ssi_release_dma_channels(ssi); 787 788 return -ENODEV; 789 } 790 791 static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd, 792 struct snd_soc_dai *dai) 793 { 794 struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai); 795 struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream); 796 int ret = 0, i, num_transfer = 1; 797 798 switch (cmd) { 799 case SNDRV_PCM_TRIGGER_START: 800 /* Soft Reset */ 801 if (!rz_ssi_is_stream_running(&ssi->playback) && 802 !rz_ssi_is_stream_running(&ssi->capture)) { 803 rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST); 804 rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0); 805 udelay(5); 806 } 807 808 rz_ssi_stream_init(strm, substream); 809 810 if (ssi->dma_rt) { 811 bool is_playback; 812 813 is_playback = rz_ssi_stream_is_play(ssi, substream); 814 ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, 815 is_playback); 816 /* Fallback to pio */ 817 if (ret < 0) { 818 ssi->playback.transfer = rz_ssi_pio_send; 819 ssi->capture.transfer = rz_ssi_pio_recv; 820 rz_ssi_release_dma_channels(ssi); 821 } 822 } 823 824 /* For DMA, queue up multiple DMA descriptors */ 825 if (rz_ssi_is_dma_enabled(ssi)) 826 num_transfer = 4; 827 828 for (i = 0; i < num_transfer; i++) { 829 ret = strm->transfer(ssi, strm); 830 if (ret) 831 goto done; 832 } 833 834 ret = rz_ssi_start(ssi, strm); 835 break; 836 case SNDRV_PCM_TRIGGER_STOP: 837 rz_ssi_stop(ssi, strm); 838 rz_ssi_stream_quit(ssi, strm); 839 break; 840 } 841 842 done: 843 return ret; 844 } 845 846 static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt) 847 { 848 struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai); 849 850 switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) { 851 case SND_SOC_DAIFMT_BP_FP: 852 break; 853 default: 854 dev_err(ssi->dev, "Codec should be clk and frame consumer\n"); 855 return -EINVAL; 856 } 857 858 /* 859 * set clock polarity 860 * 861 * "normal" BCLK = Signal is available at rising edge of BCLK 862 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge 863 */ 864 switch (fmt & SND_SOC_DAIFMT_INV_MASK) { 865 case SND_SOC_DAIFMT_NB_NF: 866 ssi->bckp_rise = false; 867 ssi->lrckp_fsync_fall = false; 868 break; 869 case SND_SOC_DAIFMT_NB_IF: 870 ssi->bckp_rise = false; 871 ssi->lrckp_fsync_fall = true; 872 break; 873 case SND_SOC_DAIFMT_IB_NF: 874 ssi->bckp_rise = true; 875 ssi->lrckp_fsync_fall = false; 876 break; 877 case SND_SOC_DAIFMT_IB_IF: 878 ssi->bckp_rise = true; 879 ssi->lrckp_fsync_fall = true; 880 break; 881 default: 882 return -EINVAL; 883 } 884 885 /* only i2s support */ 886 switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) { 887 case SND_SOC_DAIFMT_I2S: 888 break; 889 default: 890 dev_err(ssi->dev, "Only I2S mode is supported.\n"); 891 return -EINVAL; 892 } 893 894 return 0; 895 } 896 897 static bool rz_ssi_is_valid_hw_params(struct rz_ssi_priv *ssi, unsigned int rate, 898 unsigned int channels, 899 unsigned int sample_width, 900 unsigned int sample_bits) 901 { 902 if (ssi->hw_params_cache.rate != rate || 903 ssi->hw_params_cache.channels != channels || 904 ssi->hw_params_cache.sample_width != sample_width || 905 ssi->hw_params_cache.sample_bits != sample_bits) 906 return false; 907 908 return true; 909 } 910 911 static void rz_ssi_cache_hw_params(struct rz_ssi_priv *ssi, unsigned int rate, 912 unsigned int channels, 913 unsigned int sample_width, 914 unsigned int sample_bits) 915 { 916 ssi->hw_params_cache.rate = rate; 917 ssi->hw_params_cache.channels = channels; 918 ssi->hw_params_cache.sample_width = sample_width; 919 ssi->hw_params_cache.sample_bits = sample_bits; 920 } 921 922 static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream, 923 struct snd_pcm_hw_params *params, 924 struct snd_soc_dai *dai) 925 { 926 struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai); 927 struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream); 928 unsigned int sample_bits = hw_param_interval(params, 929 SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min; 930 unsigned int channels = params_channels(params); 931 unsigned int rate = params_rate(params); 932 933 if (sample_bits != 16) { 934 dev_err(ssi->dev, "Unsupported sample width: %d\n", 935 sample_bits); 936 return -EINVAL; 937 } 938 939 if (channels != 2) { 940 dev_err(ssi->dev, "Number of channels not matched: %d\n", 941 channels); 942 return -EINVAL; 943 } 944 945 if (rz_ssi_is_stream_running(&ssi->playback) || 946 rz_ssi_is_stream_running(&ssi->capture)) { 947 if (rz_ssi_is_valid_hw_params(ssi, rate, channels, 948 strm->sample_width, sample_bits)) 949 return 0; 950 951 dev_err(ssi->dev, "Full duplex needs same HW params\n"); 952 return -EINVAL; 953 } 954 955 rz_ssi_cache_hw_params(ssi, rate, channels, strm->sample_width, 956 sample_bits); 957 958 return rz_ssi_clk_setup(ssi, rate, channels); 959 } 960 961 static const struct snd_soc_dai_ops rz_ssi_dai_ops = { 962 .trigger = rz_ssi_dai_trigger, 963 .set_fmt = rz_ssi_dai_set_fmt, 964 .hw_params = rz_ssi_dai_hw_params, 965 }; 966 967 static const struct snd_pcm_hardware rz_ssi_pcm_hardware = { 968 .info = SNDRV_PCM_INFO_INTERLEAVED | 969 SNDRV_PCM_INFO_MMAP | 970 SNDRV_PCM_INFO_MMAP_VALID, 971 .buffer_bytes_max = PREALLOC_BUFFER, 972 .period_bytes_min = 32, 973 .period_bytes_max = 8192, 974 .channels_min = SSI_CHAN_MIN, 975 .channels_max = SSI_CHAN_MAX, 976 .periods_min = 1, 977 .periods_max = 32, 978 .fifo_size = 32 * 2, 979 }; 980 981 static int rz_ssi_pcm_open(struct snd_soc_component *component, 982 struct snd_pcm_substream *substream) 983 { 984 snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware); 985 986 return snd_pcm_hw_constraint_integer(substream->runtime, 987 SNDRV_PCM_HW_PARAM_PERIODS); 988 } 989 990 static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component, 991 struct snd_pcm_substream *substream) 992 { 993 struct snd_soc_dai *dai = rz_ssi_get_dai(substream); 994 struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai); 995 struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream); 996 997 return strm->buffer_pos; 998 } 999 1000 static int rz_ssi_pcm_new(struct snd_soc_component *component, 1001 struct snd_soc_pcm_runtime *rtd) 1002 { 1003 snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV, 1004 rtd->card->snd_card->dev, 1005 PREALLOC_BUFFER, PREALLOC_BUFFER_MAX); 1006 return 0; 1007 } 1008 1009 static struct snd_soc_dai_driver rz_ssi_soc_dai[] = { 1010 { 1011 .name = "rz-ssi-dai", 1012 .playback = { 1013 .rates = SSI_RATES, 1014 .formats = SSI_FMTS, 1015 .channels_min = SSI_CHAN_MIN, 1016 .channels_max = SSI_CHAN_MAX, 1017 }, 1018 .capture = { 1019 .rates = SSI_RATES, 1020 .formats = SSI_FMTS, 1021 .channels_min = SSI_CHAN_MIN, 1022 .channels_max = SSI_CHAN_MAX, 1023 }, 1024 .ops = &rz_ssi_dai_ops, 1025 }, 1026 }; 1027 1028 static const struct snd_soc_component_driver rz_ssi_soc_component = { 1029 .name = "rz-ssi", 1030 .open = rz_ssi_pcm_open, 1031 .pointer = rz_ssi_pcm_pointer, 1032 .pcm_construct = rz_ssi_pcm_new, 1033 .legacy_dai_naming = 1, 1034 }; 1035 1036 static int rz_ssi_probe(struct platform_device *pdev) 1037 { 1038 struct rz_ssi_priv *ssi; 1039 struct clk *audio_clk; 1040 struct resource *res; 1041 int ret; 1042 1043 ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL); 1044 if (!ssi) 1045 return -ENOMEM; 1046 1047 ssi->pdev = pdev; 1048 ssi->dev = &pdev->dev; 1049 ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res); 1050 if (IS_ERR(ssi->base)) 1051 return PTR_ERR(ssi->base); 1052 1053 ssi->phys = res->start; 1054 ssi->clk = devm_clk_get(&pdev->dev, "ssi"); 1055 if (IS_ERR(ssi->clk)) 1056 return PTR_ERR(ssi->clk); 1057 1058 ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr"); 1059 if (IS_ERR(ssi->sfr_clk)) 1060 return PTR_ERR(ssi->sfr_clk); 1061 1062 audio_clk = devm_clk_get(&pdev->dev, "audio_clk1"); 1063 if (IS_ERR(audio_clk)) 1064 return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk), 1065 "no audio clk1"); 1066 1067 ssi->audio_clk_1 = clk_get_rate(audio_clk); 1068 audio_clk = devm_clk_get(&pdev->dev, "audio_clk2"); 1069 if (IS_ERR(audio_clk)) 1070 return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk), 1071 "no audio clk2"); 1072 1073 ssi->audio_clk_2 = clk_get_rate(audio_clk); 1074 if (!(ssi->audio_clk_1 || ssi->audio_clk_2)) 1075 return dev_err_probe(&pdev->dev, -EINVAL, 1076 "no audio clk1 or audio clk2"); 1077 1078 ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2; 1079 1080 /* Detect DMA support */ 1081 ret = rz_ssi_dma_request(ssi, &pdev->dev); 1082 if (ret < 0) { 1083 dev_warn(&pdev->dev, "DMA not available, using PIO\n"); 1084 ssi->playback.transfer = rz_ssi_pio_send; 1085 ssi->capture.transfer = rz_ssi_pio_recv; 1086 } else { 1087 dev_info(&pdev->dev, "DMA enabled"); 1088 ssi->playback.transfer = rz_ssi_dma_transfer; 1089 ssi->capture.transfer = rz_ssi_dma_transfer; 1090 } 1091 1092 ssi->playback.priv = ssi; 1093 ssi->capture.priv = ssi; 1094 1095 spin_lock_init(&ssi->lock); 1096 dev_set_drvdata(&pdev->dev, ssi); 1097 1098 /* Error Interrupt */ 1099 ssi->irq_int = platform_get_irq_byname(pdev, "int_req"); 1100 if (ssi->irq_int < 0) { 1101 rz_ssi_release_dma_channels(ssi); 1102 return ssi->irq_int; 1103 } 1104 1105 ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt, 1106 0, dev_name(&pdev->dev), ssi); 1107 if (ret < 0) { 1108 rz_ssi_release_dma_channels(ssi); 1109 return dev_err_probe(&pdev->dev, ret, 1110 "irq request error (int_req)\n"); 1111 } 1112 1113 if (!rz_ssi_is_dma_enabled(ssi)) { 1114 /* Tx and Rx interrupts (pio only) */ 1115 ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx"); 1116 ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx"); 1117 if (ssi->irq_tx == -ENXIO && ssi->irq_rx == -ENXIO) { 1118 ssi->irq_rt = platform_get_irq_byname(pdev, "dma_rt"); 1119 if (ssi->irq_rt < 0) 1120 return ssi->irq_rt; 1121 1122 ret = devm_request_irq(&pdev->dev, ssi->irq_rt, 1123 &rz_ssi_interrupt, 0, 1124 dev_name(&pdev->dev), ssi); 1125 if (ret < 0) 1126 return dev_err_probe(&pdev->dev, ret, 1127 "irq request error (dma_rt)\n"); 1128 } else { 1129 if (ssi->irq_tx < 0) 1130 return ssi->irq_tx; 1131 1132 if (ssi->irq_rx < 0) 1133 return ssi->irq_rx; 1134 1135 ret = devm_request_irq(&pdev->dev, ssi->irq_tx, 1136 &rz_ssi_interrupt, 0, 1137 dev_name(&pdev->dev), ssi); 1138 if (ret < 0) 1139 return dev_err_probe(&pdev->dev, ret, 1140 "irq request error (dma_tx)\n"); 1141 1142 ret = devm_request_irq(&pdev->dev, ssi->irq_rx, 1143 &rz_ssi_interrupt, 0, 1144 dev_name(&pdev->dev), ssi); 1145 if (ret < 0) 1146 return dev_err_probe(&pdev->dev, ret, 1147 "irq request error (dma_rx)\n"); 1148 } 1149 } 1150 1151 ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL); 1152 if (IS_ERR(ssi->rstc)) { 1153 ret = PTR_ERR(ssi->rstc); 1154 goto err_reset; 1155 } 1156 1157 reset_control_deassert(ssi->rstc); 1158 pm_runtime_enable(&pdev->dev); 1159 ret = pm_runtime_resume_and_get(&pdev->dev); 1160 if (ret < 0) { 1161 dev_err(&pdev->dev, "pm_runtime_resume_and_get failed\n"); 1162 goto err_pm; 1163 } 1164 1165 ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component, 1166 rz_ssi_soc_dai, 1167 ARRAY_SIZE(rz_ssi_soc_dai)); 1168 if (ret < 0) { 1169 dev_err(&pdev->dev, "failed to register snd component\n"); 1170 goto err_snd_soc; 1171 } 1172 1173 return 0; 1174 1175 err_snd_soc: 1176 pm_runtime_put(ssi->dev); 1177 err_pm: 1178 pm_runtime_disable(ssi->dev); 1179 reset_control_assert(ssi->rstc); 1180 err_reset: 1181 rz_ssi_release_dma_channels(ssi); 1182 1183 return ret; 1184 } 1185 1186 static void rz_ssi_remove(struct platform_device *pdev) 1187 { 1188 struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev); 1189 1190 rz_ssi_release_dma_channels(ssi); 1191 1192 pm_runtime_put(ssi->dev); 1193 pm_runtime_disable(ssi->dev); 1194 reset_control_assert(ssi->rstc); 1195 } 1196 1197 static const struct of_device_id rz_ssi_of_match[] = { 1198 { .compatible = "renesas,rz-ssi", }, 1199 {/* Sentinel */}, 1200 }; 1201 MODULE_DEVICE_TABLE(of, rz_ssi_of_match); 1202 1203 static struct platform_driver rz_ssi_driver = { 1204 .driver = { 1205 .name = "rz-ssi-pcm-audio", 1206 .of_match_table = rz_ssi_of_match, 1207 }, 1208 .probe = rz_ssi_probe, 1209 .remove = rz_ssi_remove, 1210 }; 1211 1212 module_platform_driver(rz_ssi_driver); 1213 1214 MODULE_LICENSE("GPL v2"); 1215 MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver"); 1216 MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>"); 1217