// SPDX-License-Identifier: GPL-2.0
//
// Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
//
// Copyright (C) 2021 Renesas Electronics Corp.
// Copyright (C) 2019 Chris Brandt.
//

#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <sound/soc.h>

/* REGISTER OFFSET */
#define SSICR			0x000
#define SSISR			0x004
#define SSIFCR			0x010
#define SSIFSR			0x014
#define SSIFTDR			0x018
#define SSIFRDR			0x01c
#define SSIOFR			0x020
#define SSISCR			0x024

/* SSI REGISTER BITS */
#define SSICR_DWL(x)		(((x) & 0x7) << 19)
#define SSICR_SWL(x)		(((x) & 0x7) << 16)

#define SSICR_CKS		BIT(30)
#define SSICR_TUIEN		BIT(29)
#define SSICR_TOIEN		BIT(28)
#define SSICR_RUIEN		BIT(27)
#define SSICR_ROIEN		BIT(26)
#define SSICR_MST		BIT(14)
#define SSICR_BCKP		BIT(13)
#define SSICR_LRCKP		BIT(12)
#define SSICR_CKDV(x)		(((x) & 0xf) << 4)
#define SSICR_TEN		BIT(1)
#define SSICR_REN		BIT(0)

#define SSISR_TUIRQ		BIT(29)
#define SSISR_TOIRQ		BIT(28)
#define SSISR_RUIRQ		BIT(27)
#define SSISR_ROIRQ		BIT(26)
#define SSISR_IIRQ		BIT(25)

#define SSIFCR_AUCKE		BIT(31)
#define SSIFCR_SSIRST		BIT(16)
#define SSIFCR_TIE		BIT(3)
#define SSIFCR_RIE		BIT(2)
#define SSIFCR_TFRST		BIT(1)
#define SSIFCR_RFRST		BIT(0)
#define SSIFCR_FIFO_RST		(SSIFCR_TFRST | SSIFCR_RFRST)

#define SSIFSR_TDC_MASK		0x3f
#define SSIFSR_TDC_SHIFT	24
#define SSIFSR_RDC_MASK		0x3f
#define SSIFSR_RDC_SHIFT	8

#define SSIFSR_TDE		BIT(16)
#define SSIFSR_RDF		BIT(0)

#define SSIOFR_LRCONT		BIT(8)

#define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
#define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)

/* Pre-allocated buffer sizes */
#define PREALLOC_BUFFER		(SZ_32K)
#define PREALLOC_BUFFER_MAX	(SZ_32K)

#define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8k-48kHz */
#define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
#define SSI_CHAN_MIN		2
#define SSI_CHAN_MAX		2
#define SSI_FIFO_DEPTH		32

struct rz_ssi_priv;

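/*
 * Per-direction (playback or capture) stream state: the active substream,
 * FIFO/DMA bookkeeping, error counters and the transfer operation
 * (PIO or DMA) used to move data.
 */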
struct rz_ssi_stream {
	struct rz_ssi_priv *priv;
	struct snd_pcm_substream *substream;
	int fifo_sample_size;	/* sample capacity of SSI FIFO */
	int dma_buffer_pos;	/* The address for the next DMA descriptor */
	int period_counter;	/* for keeping track of periods transferred */
	int sample_width;
	int buffer_pos;		/* current frame position in the buffer */
	int running;		/* 0=stopped, 1=running */

	int uerr_num;
	int oerr_num;

	struct dma_chan *dma_ch;

	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
};

struct rz_ssi_priv {
	void __iomem *base;
	struct reset_control *rstc;
	struct device *dev;
	struct clk *sfr_clk;
	struct clk *clk;

	phys_addr_t phys;
	int irq_int;
	int irq_tx;
	int irq_rx;
	int irq_rt;

	spinlock_t lock;

	/*
	 * The SSI supports full-duplex transmission and reception.
	 * However, if an error occurs, channel reset (both transmission
	 * and reception reset) is required.
	 * So it is better to use it as half-duplex (playing and recording
	 * should be done on separate channels).
	 */
	struct rz_ssi_stream playback;
	struct rz_ssi_stream capture;

	/* clock */
	unsigned long audio_mck;
	unsigned long audio_clk_1;
	unsigned long audio_clk_2;

	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
	bool bckp_rise;		/* Bit clock polarity (SSICR.BCKP) */
	bool dma_rt;

	/* Full duplex communication support */
	struct {
		unsigned int rate;
		unsigned int channels;
		unsigned int sample_width;
		unsigned int sample_bits;
	} hw_params_cache;
};

static void rz_ssi_dma_complete(void *data);

static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
{
	writel(data, (priv->base + reg));
}

static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
{
	return readl(priv->base + reg);
}

static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
				 u32 bclr, u32 bset)
{
	u32 val;

	val = readl(priv->base + reg);
	val = (val & ~bclr) | bset;
	writel(val, (priv->base + reg));
}

static inline bool rz_ssi_stream_is_play(struct snd_pcm_substream *substream)
{
	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
}

static inline struct rz_ssi_stream *
rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
{
	struct rz_ssi_stream *stream = &ssi->playback;

	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
		stream = &ssi->capture;

	return stream;
}

static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
{
	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
}

static void rz_ssi_set_substream(struct rz_ssi_stream *strm,
				 struct snd_pcm_substream *substream)
{
	struct rz_ssi_priv *ssi = strm->priv;
	unsigned long flags;

	spin_lock_irqsave(&ssi->lock, flags);
	strm->substream = substream;
	spin_unlock_irqrestore(&ssi->lock, flags);
}

static bool rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
				   struct rz_ssi_stream *strm)
{
	unsigned long flags;
	bool ret;

	spin_lock_irqsave(&ssi->lock, flags);
	ret = strm->substream && strm->substream->runtime;
	spin_unlock_irqrestore(&ssi->lock, flags);

	return ret;
}

static inline bool rz_ssi_is_stream_running(struct rz_ssi_stream *strm)
{
	return strm->substream && strm->running;
}

static void rz_ssi_stream_init(struct rz_ssi_stream *strm,
			       struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	rz_ssi_set_substream(strm, substream);
	strm->sample_width = samples_to_bytes(runtime, 1);
	strm->dma_buffer_pos = 0;
	strm->period_counter = 0;
	strm->buffer_pos = 0;

	strm->oerr_num = 0;
	strm->uerr_num = 0;
	strm->running = 0;

	/* fifo init */
	strm->fifo_sample_size = SSI_FIFO_DEPTH;
}

static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct device *dev = ssi->dev;

	rz_ssi_set_substream(strm, NULL);

	if (strm->oerr_num > 0)
		dev_info(dev, "overrun = %d\n", strm->oerr_num);

	if (strm->uerr_num > 0)
		dev_info(dev, "underrun = %d\n", strm->uerr_num);
}

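/*
 * Configure the SSI as clock provider: the bit clock is rate * channels *
 * 32 (system word length), derived from AUDIO_CLK1 or AUDIO_CLK2 through
 * one of the fixed CKDV dividers. Data word length is fixed at 16 bits.
 */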
static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
			    unsigned int channels)
{
	static u8 ckdv[] = { 1, 2, 4, 8, 16, 32, 64, 128, 6, 12, 24, 48, 96 };
	unsigned int channel_bits = 32;	/* System Word Length */
	unsigned long bclk_rate = rate * channels * channel_bits;
	unsigned int div;
	unsigned int i;
	u32 ssicr = 0;
	u32 clk_ckdv;

	/* Clear AUCKE so we can set MST */
	rz_ssi_reg_writel(ssi, SSIFCR, 0);

	/* Continue to output LRCK pin even when idle */
	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
		if (ssi->audio_clk_1 % bclk_rate)
			ssi->audio_mck = ssi->audio_clk_2;
		else
			ssi->audio_mck = ssi->audio_clk_1;
	}

	/* Clock setting */
	ssicr |= SSICR_MST;
	if (ssi->audio_mck == ssi->audio_clk_1)
		ssicr |= SSICR_CKS;
	if (ssi->bckp_rise)
		ssicr |= SSICR_BCKP;
	if (ssi->lrckp_fsync_fall)
		ssicr |= SSICR_LRCKP;

	/* Determine the clock divider */
	clk_ckdv = 0;
	div = ssi->audio_mck / bclk_rate;
	/* try to find a match */
	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
		if (ckdv[i] == div) {
			clk_ckdv = i;
			break;
		}
	}

	if (i == ARRAY_SIZE(ckdv)) {
		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
		return -EINVAL;
	}

	/*
	 * DWL: Data Word Length = 16 bits
	 * SWL: System Word Length = 32 bits
	 */
	ssicr |= SSICR_CKDV(clk_ckdv);
	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR, SSIFCR_AUCKE | SSIFCR_FIFO_RST);

	return 0;
}

static void rz_ssi_set_idle(struct rz_ssi_priv *ssi)
{
	u32 tmp;
	int ret;

	/* Disable irqs */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
			     SSICR_RUIEN | SSICR_ROIEN, 0);
	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	/* Wait for idle */
	ret = readl_poll_timeout_atomic(ssi->base + SSISR, tmp, (tmp & SSISR_IIRQ), 1, 100);
	if (ret)
		dev_warn_ratelimited(ssi->dev, "timeout waiting for SSI idle\n");

	/* Hold FIFOs in reset */
	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_FIFO_RST);
}

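/*
 * Start transmission and/or reception. When the second direction of a
 * full-duplex session is started, the already running direction is
 * briefly stopped and the SSI idled so that TEN and REN can be
 * re-enabled together.
 */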
static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	bool is_play = rz_ssi_stream_is_play(strm->substream);
	bool is_full_duplex;
	u32 ssicr, ssifcr;

	is_full_duplex = rz_ssi_is_stream_running(&ssi->playback) ||
			 rz_ssi_is_stream_running(&ssi->capture);
	ssicr = rz_ssi_reg_readl(ssi, SSICR);
	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR);
	if (!is_full_duplex) {
		ssifcr &= ~0xF;
	} else {
		rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);
		rz_ssi_set_idle(ssi);
		ssifcr &= ~SSIFCR_FIFO_RST;
	}

	/* FIFO interrupt thresholds */
	if (rz_ssi_is_dma_enabled(ssi))
		rz_ssi_reg_writel(ssi, SSISCR, 0);
	else
		rz_ssi_reg_writel(ssi, SSISCR,
				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
				  SSISCR_RDFS(0));

	/* enable IRQ */
	if (is_play) {
		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
		ssifcr |= SSIFCR_TIE;
		if (!is_full_duplex)
			ssifcr |= SSIFCR_RFRST;
	} else {
		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
		ssifcr |= SSIFCR_RIE;
		if (!is_full_duplex)
			ssifcr |= SSIFCR_TFRST;
	}

	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	strm->running = 1;
	if (is_full_duplex)
		ssicr |= SSICR_TEN | SSICR_REN;
	else
		ssicr |= is_play ? SSICR_TEN : SSICR_REN;

	rz_ssi_reg_writel(ssi, SSICR, ssicr);

	return 0;
}

static int rz_ssi_swreset(struct rz_ssi_priv *ssi)
{
	u32 tmp;

	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
	return readl_poll_timeout_atomic(ssi->base + SSIFCR, tmp, !(tmp & SSIFCR_SSIRST), 1, 5);
}

static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	strm->running = 0;

	if (rz_ssi_is_stream_running(&ssi->playback) ||
	    rz_ssi_is_stream_running(&ssi->capture))
		return 0;

	/* Disable TX/RX */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);

	/* Cancel all remaining DMA transactions */
	if (rz_ssi_is_dma_enabled(ssi)) {
		if (ssi->playback.dma_ch)
			dmaengine_terminate_async(ssi->playback.dma_ch);
		if (ssi->capture.dma_ch)
			dmaengine_terminate_async(ssi->capture.dma_ch);
	}

	rz_ssi_set_idle(ssi);

	return 0;
}

static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	int current_period;

	if (!strm->running || !substream || !substream->runtime)
		return;

	runtime = substream->runtime;
	strm->buffer_pos += frames;
	WARN_ON(strm->buffer_pos > runtime->buffer_size);

	/* ring buffer */
	if (strm->buffer_pos == runtime->buffer_size)
		strm->buffer_pos = 0;

	current_period = strm->buffer_pos / runtime->period_size;
	if (strm->period_counter != current_period) {
		snd_pcm_period_elapsed(strm->substream);
		strm->period_counter = current_period;
	}
}

static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	u16 *buf;
	int fifo_samples;
	int frames_left;
	int samples;
	int i;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;

	do {
		/* frames left in this period */
		frames_left = runtime->period_size -
			      (strm->buffer_pos % runtime->period_size);
		if (!frames_left)
			frames_left = runtime->period_size;

		/* Samples in RX FIFO */
		fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
				SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;

		/* Only read full frames at a time */
		samples = 0;
		while (frames_left && (fifo_samples >= runtime->channels)) {
			samples += runtime->channels;
			fifo_samples -= runtime->channels;
			frames_left--;
		}

		/* not enough samples yet */
		if (!samples)
			break;

		/* calculate new buffer index */
		buf = (u16 *)runtime->dma_area;
		buf += strm->buffer_pos * runtime->channels;

		/* Note, only supports 16-bit samples */
		for (i = 0; i < samples; i++)
			*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);

		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
		rz_ssi_pointer_update(strm, samples / runtime->channels);
	} while (!frames_left && fifo_samples >= runtime->channels);

	return 0;
}

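/*
 * PIO transmit: push as many complete frames as fit into the free space of
 * the TX FIFO, up to the remainder of the current period. Samples are
 * 16-bit and are written left-justified into the upper half of SSIFTDR.
 */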
static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime = substream->runtime;
	int sample_space;
	int samples = 0;
	int frames_left;
	int i;
	u32 ssifsr;
	u16 *buf;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	/* frames left in this period */
	frames_left = runtime->period_size - (strm->buffer_pos %
					      runtime->period_size);
	if (frames_left == 0)
		frames_left = runtime->period_size;

	sample_space = strm->fifo_sample_size;
	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;

	/* Only add full frames at a time */
	while (frames_left && (sample_space >= runtime->channels)) {
		samples += runtime->channels;
		sample_space -= runtime->channels;
		frames_left--;
	}

	/* no space to send anything right now */
	if (samples == 0)
		return 0;

	/* calculate new buffer index */
	buf = (u16 *)(runtime->dma_area);
	buf += strm->buffer_pos * runtime->channels;

	/* Note, only supports 16-bit samples */
	for (i = 0; i < samples; i++)
		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));

	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
	rz_ssi_pointer_update(strm, samples / runtime->channels);

	return 0;
}

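/*
 * Interrupt handler shared by the error/idle IRQ and, in PIO mode, the
 * TX-empty/RX-full (or combined rt) IRQs. Underrun/overrun errors are
 * recovered by stopping the affected streams, clearing the flags,
 * transferring more data and restarting.
 */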
static irqreturn_t rz_ssi_interrupt(int irq, void *data)
{
	struct rz_ssi_stream *strm_playback = NULL;
	struct rz_ssi_stream *strm_capture = NULL;
	struct rz_ssi_priv *ssi = data;
	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);

	if (ssi->playback.substream)
		strm_playback = &ssi->playback;
	if (ssi->capture.substream)
		strm_capture = &ssi->capture;

	if (!strm_playback && !strm_capture)
		return IRQ_HANDLED;	/* Left over TX/RX interrupt */

	if (irq == ssi->irq_int) {	/* error or idle */
		bool is_stopped = false;
		int i, count;

		if (rz_ssi_is_dma_enabled(ssi))
			count = 4;
		else
			count = 1;

		if (ssisr & (SSISR_RUIRQ | SSISR_ROIRQ | SSISR_TUIRQ | SSISR_TOIRQ))
			is_stopped = true;

		if (ssi->capture.substream && is_stopped) {
			if (ssisr & SSISR_RUIRQ)
				strm_capture->uerr_num++;
			if (ssisr & SSISR_ROIRQ)
				strm_capture->oerr_num++;

			rz_ssi_stop(ssi, strm_capture);
		}

		if (ssi->playback.substream && is_stopped) {
			if (ssisr & SSISR_TUIRQ)
				strm_playback->uerr_num++;
			if (ssisr & SSISR_TOIRQ)
				strm_playback->oerr_num++;

			rz_ssi_stop(ssi, strm_playback);
		}

		/* Clear all flags */
		rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ | SSISR_TUIRQ |
				     SSISR_ROIRQ | SSISR_RUIRQ, 0);

		/* Add/remove more data */
		if (ssi->capture.substream && is_stopped) {
			for (i = 0; i < count; i++)
				strm_capture->transfer(ssi, strm_capture);
		}

		if (ssi->playback.substream && is_stopped) {
			for (i = 0; i < count; i++)
				strm_playback->transfer(ssi, strm_playback);
		}

		/* Resume */
		if (ssi->playback.substream && is_stopped)
			rz_ssi_start(ssi, &ssi->playback);
		if (ssi->capture.substream && is_stopped)
			rz_ssi_start(ssi, &ssi->capture);
	}

	if (!rz_ssi_is_stream_running(&ssi->playback) &&
	    !rz_ssi_is_stream_running(&ssi->capture))
		return IRQ_HANDLED;

	/* tx data empty */
	if (irq == ssi->irq_tx && rz_ssi_is_stream_running(&ssi->playback))
		strm_playback->transfer(ssi, &ssi->playback);

	/* rx data full */
	if (irq == ssi->irq_rx && rz_ssi_is_stream_running(&ssi->capture)) {
		strm_capture->transfer(ssi, &ssi->capture);
		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
	}

	if (irq == ssi->irq_rt) {
		if (ssi->playback.substream) {
			strm_playback->transfer(ssi, &ssi->playback);
		} else {
			strm_capture->transfer(ssi, &ssi->capture);
			rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
		}
	}

	return IRQ_HANDLED;
}

static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
				   struct dma_chan *dma_ch, bool is_play)
{
	struct dma_slave_config cfg;

	memset(&cfg, 0, sizeof(cfg));

	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
	cfg.dst_addr = ssi->phys + SSIFTDR;
	cfg.src_addr = ssi->phys + SSIFRDR;
	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;

	return dmaengine_slave_config(dma_ch, &cfg);
}

static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct dma_async_tx_descriptor *desc;
	struct snd_pcm_runtime *runtime;
	enum dma_transfer_direction dir;
	u32 dma_paddr, dma_size;
	int amount;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;
	if (runtime->state == SNDRV_PCM_STATE_DRAINING)
		/*
		 * Stream is ending, so do not queue up any more DMA
		 * transfers otherwise we play partial sound clips
		 * because we can't shut off the DMA quick enough.
		 */
		return 0;

	dir = rz_ssi_stream_is_play(substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;

	/* Always transfer 1 period */
	amount = runtime->period_size;

	/* DMA physical address and size */
	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
							strm->dma_buffer_pos);
	dma_size = frames_to_bytes(runtime, amount);
	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
					   dir,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
		return -ENOMEM;
	}

	desc->callback = rz_ssi_dma_complete;
	desc->callback_param = strm;

	if (dmaengine_submit(desc) < 0) {
		dev_err(ssi->dev, "dmaengine_submit() fail\n");
		return -EIO;
	}

	/* Update DMA pointer */
	strm->dma_buffer_pos += amount;
	if (strm->dma_buffer_pos >= runtime->buffer_size)
		strm->dma_buffer_pos = 0;

	/* Start DMA */
	dma_async_issue_pending(strm->dma_ch);

	return 0;
}

static void rz_ssi_dma_complete(void *data)
{
	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;

	if (!strm->running || !strm->substream || !strm->substream->runtime)
		return;

	/* Note that next DMA transaction has probably already started */
	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);

	/* Queue up another DMA transaction */
	rz_ssi_dma_transfer(strm->priv, strm);
}

static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
{
	if (ssi->playback.dma_ch) {
		dma_release_channel(ssi->playback.dma_ch);
		ssi->playback.dma_ch = NULL;
		if (ssi->dma_rt)
			ssi->dma_rt = false;
	}

	if (ssi->capture.dma_ch) {
		dma_release_channel(ssi->capture.dma_ch);
		ssi->capture.dma_ch = NULL;
	}
}

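/*
 * Request DMA channels: prefer dedicated "tx" and "rx" channels, fall back
 * to a single shared "rt" channel, and report failure (so the caller can
 * fall back to PIO) if neither can be configured.
 */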
static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
{
	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
	if (IS_ERR(ssi->playback.dma_ch))
		ssi->playback.dma_ch = NULL;

	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
	if (IS_ERR(ssi->capture.dma_ch))
		ssi->capture.dma_ch = NULL;

	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
		if (IS_ERR(ssi->playback.dma_ch)) {
			ssi->playback.dma_ch = NULL;
			goto no_dma;
		}

		ssi->dma_rt = true;
	}

	if (!rz_ssi_is_dma_enabled(ssi))
		goto no_dma;

	if (ssi->playback.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
		goto no_dma;

	if (ssi->capture.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
		goto no_dma;

	return 0;

no_dma:
	rz_ssi_release_dma_channels(ssi);

	return -ENODEV;
}

static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
			      struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
	int ret = 0, i, num_transfer = 1;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		rz_ssi_stream_init(strm, substream);

		if (ssi->dma_rt) {
			bool is_playback;

			is_playback = rz_ssi_stream_is_play(substream);
			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
						      is_playback);
			/* Fallback to pio */
			if (ret < 0) {
				ssi->playback.transfer = rz_ssi_pio_send;
				ssi->capture.transfer = rz_ssi_pio_recv;
				rz_ssi_release_dma_channels(ssi);
			}
		}

		/* For DMA, queue up multiple DMA descriptors */
		if (rz_ssi_is_dma_enabled(ssi))
			num_transfer = 4;

		for (i = 0; i < num_transfer; i++) {
			ret = strm->transfer(ssi, strm);
			if (ret)
				goto done;
		}

		ret = rz_ssi_start(ssi, strm);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
		rz_ssi_stop(ssi, strm);
		rz_ssi_stream_quit(ssi, strm);
		break;
	}

done:
	return ret;
}

static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);

	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
	case SND_SOC_DAIFMT_BP_FP:
		break;
	default:
		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
		return -EINVAL;
	}

	/*
	 * set clock polarity
	 *
	 * "normal" BCLK = Signal is available at rising edge of BCLK
	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
	 */
	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
	case SND_SOC_DAIFMT_NB_NF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_NB_IF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = true;
		break;
	case SND_SOC_DAIFMT_IB_NF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_IB_IF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = true;
		break;
	default:
		return -EINVAL;
	}

	/* only i2s support */
	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
	case SND_SOC_DAIFMT_I2S:
		break;
	default:
		dev_err(ssi->dev, "Only I2S mode is supported.\n");
		return -EINVAL;
	}

	return 0;
}

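/*
 * In full-duplex operation both directions share one clock setup, so the
 * second stream may only start if its hw_params match those cached when
 * the first stream was configured.
 */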
static bool rz_ssi_is_valid_hw_params(struct rz_ssi_priv *ssi, unsigned int rate,
				      unsigned int channels,
				      unsigned int sample_width,
				      unsigned int sample_bits)
{
	if (ssi->hw_params_cache.rate != rate ||
	    ssi->hw_params_cache.channels != channels ||
	    ssi->hw_params_cache.sample_width != sample_width ||
	    ssi->hw_params_cache.sample_bits != sample_bits)
		return false;

	return true;
}

static void rz_ssi_cache_hw_params(struct rz_ssi_priv *ssi, unsigned int rate,
				   unsigned int channels,
				   unsigned int sample_width,
				   unsigned int sample_bits)
{
	ssi->hw_params_cache.rate = rate;
	ssi->hw_params_cache.channels = channels;
	ssi->hw_params_cache.sample_width = sample_width;
	ssi->hw_params_cache.sample_bits = sample_bits;
}

static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
				struct snd_pcm_hw_params *params,
				struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
	unsigned int sample_bits = hw_param_interval(params,
					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
	unsigned int channels = params_channels(params);
	unsigned int rate = params_rate(params);
	int ret;

	if (sample_bits != 16) {
		dev_err(ssi->dev, "Unsupported sample width: %d\n",
			sample_bits);
		return -EINVAL;
	}

	if (channels != 2) {
		dev_err(ssi->dev, "Number of channels not matched: %d\n",
			channels);
		return -EINVAL;
	}

	if (rz_ssi_is_stream_running(&ssi->playback) ||
	    rz_ssi_is_stream_running(&ssi->capture)) {
		if (rz_ssi_is_valid_hw_params(ssi, rate, channels,
					      strm->sample_width, sample_bits))
			return 0;

		dev_err(ssi->dev, "Full duplex needs same HW params\n");
		return -EINVAL;
	}

	rz_ssi_cache_hw_params(ssi, rate, channels, strm->sample_width,
			       sample_bits);

	ret = rz_ssi_swreset(ssi);
	if (ret)
		return ret;

	return rz_ssi_clk_setup(ssi, rate, channels);
}

static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
	.trigger	= rz_ssi_dai_trigger,
	.set_fmt	= rz_ssi_dai_set_fmt,
	.hw_params	= rz_ssi_dai_hw_params,
};

static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
				  SNDRV_PCM_INFO_MMAP		|
				  SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max	= PREALLOC_BUFFER,
	.period_bytes_min	= 32,
	.period_bytes_max	= 8192,
	.channels_min		= SSI_CHAN_MIN,
	.channels_max		= SSI_CHAN_MAX,
	.periods_min		= 1,
	.periods_max		= 32,
	.fifo_size		= 32 * 2,
};

static int rz_ssi_pcm_open(struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);

	return snd_pcm_hw_constraint_integer(substream->runtime,
					     SNDRV_PCM_HW_PARAM_PERIODS);
}

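/*
 * The pointer callback simply returns the frame position maintained by
 * rz_ssi_pointer_update() (PIO paths and DMA completion).
 */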
static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
					    struct snd_pcm_substream *substream)
{
	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);
	struct snd_soc_dai *dai = snd_soc_rtd_to_cpu(rtd, 0);
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);

	return strm->buffer_pos;
}

static int rz_ssi_pcm_new(struct snd_soc_component *component,
			  struct snd_soc_pcm_runtime *rtd)
{
	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
				       rtd->card->snd_card->dev,
				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
	return 0;
}

static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
	{
		.name			= "rz-ssi-dai",
		.playback = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.capture = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.ops = &rz_ssi_dai_ops,
	},
};

static const struct snd_soc_component_driver rz_ssi_soc_component = {
	.name			= "rz-ssi",
	.open			= rz_ssi_pcm_open,
	.pointer		= rz_ssi_pcm_pointer,
	.pcm_construct		= rz_ssi_pcm_new,
	.legacy_dai_naming	= 1,
};

static int rz_ssi_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct rz_ssi_priv *ssi;
	struct clk *audio_clk;
	struct resource *res;
	int ret;

	ssi = devm_kzalloc(dev, sizeof(*ssi), GFP_KERNEL);
	if (!ssi)
		return -ENOMEM;

	ssi->dev = dev;
	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(ssi->base))
		return PTR_ERR(ssi->base);

	ssi->phys = res->start;
	ssi->clk = devm_clk_get(dev, "ssi");
	if (IS_ERR(ssi->clk))
		return PTR_ERR(ssi->clk);

	ssi->sfr_clk = devm_clk_get(dev, "ssi_sfr");
	if (IS_ERR(ssi->sfr_clk))
		return PTR_ERR(ssi->sfr_clk);

	audio_clk = devm_clk_get(dev, "audio_clk1");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk1");

	ssi->audio_clk_1 = clk_get_rate(audio_clk);
	audio_clk = devm_clk_get(dev, "audio_clk2");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk2");

	ssi->audio_clk_2 = clk_get_rate(audio_clk);
	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
		return dev_err_probe(&pdev->dev, -EINVAL,
				     "no audio clk1 or audio clk2");

	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;

	/* Detect DMA support */
	ret = rz_ssi_dma_request(ssi, dev);
	if (ret < 0) {
		dev_warn(dev, "DMA not available, using PIO\n");
		ssi->playback.transfer = rz_ssi_pio_send;
		ssi->capture.transfer = rz_ssi_pio_recv;
	} else {
		dev_info(dev, "DMA enabled");
		ssi->playback.transfer = rz_ssi_dma_transfer;
		ssi->capture.transfer = rz_ssi_dma_transfer;
	}

	ssi->playback.priv = ssi;
	ssi->capture.priv = ssi;

	spin_lock_init(&ssi->lock);
	dev_set_drvdata(dev, ssi);

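	/*
	 * The error/idle interrupt (int_req) is always required. In PIO mode
	 * the data interrupts are needed as well: either separate dma_tx and
	 * dma_rx lines or a single combined dma_rt line.
	 */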
	/* Error Interrupt */
	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
	if (ssi->irq_int < 0) {
		ret = ssi->irq_int;
		goto err_release_dma_chs;
	}

	ret = devm_request_irq(dev, ssi->irq_int, &rz_ssi_interrupt,
			       0, dev_name(dev), ssi);
	if (ret < 0) {
		dev_err_probe(dev, ret, "irq request error (int_req)\n");
		goto err_release_dma_chs;
	}

	if (!rz_ssi_is_dma_enabled(ssi)) {
		/* Tx and Rx interrupts (pio only) */
		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
		if (ssi->irq_tx == -ENXIO && ssi->irq_rx == -ENXIO) {
			ssi->irq_rt = platform_get_irq_byname(pdev, "dma_rt");
			if (ssi->irq_rt < 0)
				return ssi->irq_rt;

			ret = devm_request_irq(dev, ssi->irq_rt,
					       &rz_ssi_interrupt, 0,
					       dev_name(dev), ssi);
			if (ret < 0)
				return dev_err_probe(dev, ret,
						     "irq request error (dma_rt)\n");
		} else {
			if (ssi->irq_tx < 0)
				return ssi->irq_tx;

			if (ssi->irq_rx < 0)
				return ssi->irq_rx;

			ret = devm_request_irq(dev, ssi->irq_tx,
					       &rz_ssi_interrupt, 0,
					       dev_name(dev), ssi);
			if (ret < 0)
				return dev_err_probe(dev, ret,
						     "irq request error (dma_tx)\n");

			ret = devm_request_irq(dev, ssi->irq_rx,
					       &rz_ssi_interrupt, 0,
					       dev_name(dev), ssi);
			if (ret < 0)
				return dev_err_probe(dev, ret,
						     "irq request error (dma_rx)\n");
		}
	}

	ssi->rstc = devm_reset_control_get_exclusive(dev, NULL);
	if (IS_ERR(ssi->rstc)) {
		ret = PTR_ERR(ssi->rstc);
		goto err_release_dma_chs;
	}

	/* Default 0 for power saving. Can be overridden via sysfs. */
	pm_runtime_set_autosuspend_delay(dev, 0);
	pm_runtime_use_autosuspend(dev);
	ret = devm_pm_runtime_enable(dev);
	if (ret < 0) {
		dev_err(dev, "Failed to enable runtime PM!\n");
		goto err_release_dma_chs;
	}

	ret = devm_snd_soc_register_component(dev, &rz_ssi_soc_component,
					      rz_ssi_soc_dai,
					      ARRAY_SIZE(rz_ssi_soc_dai));
	if (ret < 0) {
		dev_err(dev, "failed to register snd component\n");
		goto err_release_dma_chs;
	}

	return 0;

err_release_dma_chs:
	rz_ssi_release_dma_channels(ssi);

	return ret;
}

static void rz_ssi_remove(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);

	rz_ssi_release_dma_channels(ssi);

	reset_control_assert(ssi->rstc);
}

static const struct of_device_id rz_ssi_of_match[] = {
	{ .compatible = "renesas,rz-ssi", },
	{/* Sentinel */},
};
MODULE_DEVICE_TABLE(of, rz_ssi_of_match);

static int rz_ssi_runtime_suspend(struct device *dev)
{
	struct rz_ssi_priv *ssi = dev_get_drvdata(dev);

	return reset_control_assert(ssi->rstc);
}

static int rz_ssi_runtime_resume(struct device *dev)
{
	struct rz_ssi_priv *ssi = dev_get_drvdata(dev);

	return reset_control_deassert(ssi->rstc);
}

static const struct dev_pm_ops rz_ssi_pm_ops = {
	RUNTIME_PM_OPS(rz_ssi_runtime_suspend, rz_ssi_runtime_resume, NULL)
};

static struct platform_driver rz_ssi_driver = {
	.driver	= {
		.name		= "rz-ssi-pcm-audio",
		.of_match_table	= rz_ssi_of_match,
		.pm		= pm_ptr(&rz_ssi_pm_ops),
	},
	.probe		= rz_ssi_probe,
	.remove		= rz_ssi_remove,
};

module_platform_driver(rz_ssi_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");