// SPDX-License-Identifier: GPL-2.0
//
// Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
//
// Copyright (C) 2021 Renesas Electronics Corp.
// Copyright (C) 2019 Chris Brandt.
//

#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <sound/soc.h>

/* REGISTER OFFSET */
#define SSICR			0x000
#define SSISR			0x004
#define SSIFCR			0x010
#define SSIFSR			0x014
#define SSIFTDR			0x018
#define SSIFRDR			0x01c
#define SSIOFR			0x020
#define SSISCR			0x024

/* SSI REGISTER BITS */
#define SSICR_DWL(x)		(((x) & 0x7) << 19)
#define SSICR_SWL(x)		(((x) & 0x7) << 16)
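/*
 * DWL/SWL field values used by this driver (see rz_ssi_clk_setup()):
 * DWL = 1 selects a 16-bit data word, SWL = 3 selects a 32-bit system word.
 */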

#define SSICR_CKS		BIT(30)
#define SSICR_TUIEN		BIT(29)
#define SSICR_TOIEN		BIT(28)
#define SSICR_RUIEN		BIT(27)
#define SSICR_ROIEN		BIT(26)
#define SSICR_MST		BIT(14)
#define SSICR_BCKP		BIT(13)
#define SSICR_LRCKP		BIT(12)
#define SSICR_CKDV(x)		(((x) & 0xf) << 4)
#define SSICR_TEN		BIT(1)
#define SSICR_REN		BIT(0)

#define SSISR_TUIRQ		BIT(29)
#define SSISR_TOIRQ		BIT(28)
#define SSISR_RUIRQ		BIT(27)
#define SSISR_ROIRQ		BIT(26)
#define SSISR_IIRQ		BIT(25)

#define SSIFCR_AUCKE		BIT(31)
#define SSIFCR_SSIRST		BIT(16)
#define SSIFCR_TIE		BIT(3)
#define SSIFCR_RIE		BIT(2)
#define SSIFCR_TFRST		BIT(1)
#define SSIFCR_RFRST		BIT(0)
#define SSIFCR_FIFO_RST		(SSIFCR_TFRST | SSIFCR_RFRST)

#define SSIFSR_TDC_MASK		0x3f
#define SSIFSR_TDC_SHIFT	24
#define SSIFSR_RDC_MASK		0x3f
#define SSIFSR_RDC_SHIFT	8

#define SSIFSR_TDE		BIT(16)
#define SSIFSR_RDF		BIT(0)

#define SSIOFR_LRCONT		BIT(8)

#define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
#define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)

/* Pre-allocated buffer sizes */
#define PREALLOC_BUFFER		(SZ_32K)
#define PREALLOC_BUFFER_MAX	(SZ_32K)

#define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8 kHz - 48 kHz */
#define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
#define SSI_CHAN_MIN		2
#define SSI_CHAN_MAX		2
#define SSI_FIFO_DEPTH		32

struct rz_ssi_priv;

struct rz_ssi_stream {
	struct rz_ssi_priv *priv;
	struct snd_pcm_substream *substream;
	int fifo_sample_size;	/* sample capacity of SSI FIFO */
	int dma_buffer_pos;	/* frame offset in the buffer for the next DMA descriptor */
	int period_counter;	/* for keeping track of periods transferred */
	int sample_width;
	int buffer_pos;		/* current frame position in the buffer */
	int running;		/* 0=stopped, 1=running */

	int uerr_num;
	int oerr_num;

	struct dma_chan *dma_ch;

	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
};

struct rz_ssi_priv {
	void __iomem *base;
	struct platform_device *pdev;
	struct reset_control *rstc;
	struct device *dev;
	struct clk *sfr_clk;
	struct clk *clk;

	phys_addr_t phys;
	int irq_int;
	int irq_tx;
	int irq_rx;
	int irq_rt;

	spinlock_t lock;

	/*
	 * The SSI supports full-duplex transmission and reception.
	 * However, if an error occurs, a channel reset (resetting both
	 * transmission and reception) is required.
	 * So it is better to use it in half-duplex mode (playback and
	 * capture should be done on separate channels).
	 */
	struct rz_ssi_stream playback;
	struct rz_ssi_stream capture;

	/* clock */
	unsigned long audio_mck;
	unsigned long audio_clk_1;
	unsigned long audio_clk_2;

	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
	bool bckp_rise;		/* Bit clock polarity (SSICR.BCKP) */
	bool dma_rt;

	/* Full duplex communication support */
	struct {
		unsigned int rate;
		unsigned int channels;
		unsigned int sample_width;
		unsigned int sample_bits;
	} hw_params_cache;
};

static void rz_ssi_dma_complete(void *data);

static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
{
	writel(data, (priv->base + reg));
}

static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
{
	return readl(priv->base + reg);
}

static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
				 u32 bclr, u32 bset)
{
	u32 val;

	val = readl(priv->base + reg);
	val = (val & ~bclr) | bset;
	writel(val, (priv->base + reg));
}

static inline struct snd_soc_dai *
rz_ssi_get_dai(struct snd_pcm_substream *substream)
{
	struct snd_soc_pcm_runtime *rtd = snd_soc_substream_to_rtd(substream);

	return snd_soc_rtd_to_cpu(rtd, 0);
}

static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi,
					 struct snd_pcm_substream *substream)
{
	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
}

static inline struct rz_ssi_stream *
rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
{
	struct rz_ssi_stream *stream = &ssi->playback;

	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
		stream = &ssi->capture;

	return stream;
}

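/*
 * DMA is considered enabled when either a shared "rt" DMA channel (kept in
 * playback.dma_ch) or both dedicated "tx" and "rx" channels were acquired in
 * rz_ssi_dma_request().
 */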
static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
{
	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
}

static void rz_ssi_set_substream(struct rz_ssi_stream *strm,
				 struct snd_pcm_substream *substream)
{
	struct rz_ssi_priv *ssi = strm->priv;
	unsigned long flags;

	spin_lock_irqsave(&ssi->lock, flags);
	strm->substream = substream;
	spin_unlock_irqrestore(&ssi->lock, flags);
}

static bool rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
				   struct rz_ssi_stream *strm)
{
	unsigned long flags;
	bool ret;

	spin_lock_irqsave(&ssi->lock, flags);
	ret = strm->substream && strm->substream->runtime;
	spin_unlock_irqrestore(&ssi->lock, flags);

	return ret;
}

static inline bool rz_ssi_is_stream_running(struct rz_ssi_stream *strm)
{
	return strm->substream && strm->running;
}

static void rz_ssi_stream_init(struct rz_ssi_stream *strm,
			       struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	rz_ssi_set_substream(strm, substream);
	strm->sample_width = samples_to_bytes(runtime, 1);
	strm->dma_buffer_pos = 0;
	strm->period_counter = 0;
	strm->buffer_pos = 0;

	strm->oerr_num = 0;
	strm->uerr_num = 0;
	strm->running = 0;

	/* fifo init */
	strm->fifo_sample_size = SSI_FIFO_DEPTH;
}

static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream);

	rz_ssi_set_substream(strm, NULL);

	if (strm->oerr_num > 0)
		dev_info(dai->dev, "overrun = %d\n", strm->oerr_num);

	if (strm->uerr_num > 0)
		dev_info(dai->dev, "underrun = %d\n", strm->uerr_num);
}

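/*
 * Configure the SSI bit clock for the requested rate: the bit clock is
 * rate * channels * 32 (the system word length).  When both AUDIO_CLK1 and
 * AUDIO_CLK2 are available, AUDIO_CLK1 is used if it is an integer multiple
 * of the bit clock, otherwise AUDIO_CLK2 is used.  The matching CKDV divider
 * is then programmed along with 16-bit data words in 32-bit system words.
 */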
static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
			    unsigned int channels)
{
	static s8 ckdv[16] = { 1, 2, 4, 8, 16, 32, 64, 128,
			       6, 12, 24, 48, 96, -1, -1, -1 };
	unsigned int channel_bits = 32;	/* System Word Length */
	unsigned long bclk_rate = rate * channels * channel_bits;
	unsigned int div;
	unsigned int i;
	u32 ssicr = 0;
	u32 clk_ckdv;

	/* Clear AUCKE so we can set MST */
	rz_ssi_reg_writel(ssi, SSIFCR, 0);

	/* Continue to output LRCK pin even when idle */
	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
		if (ssi->audio_clk_1 % bclk_rate)
			ssi->audio_mck = ssi->audio_clk_2;
		else
			ssi->audio_mck = ssi->audio_clk_1;
	}

	/* Clock setting */
	ssicr |= SSICR_MST;
	if (ssi->audio_mck == ssi->audio_clk_1)
		ssicr |= SSICR_CKS;
	if (ssi->bckp_rise)
		ssicr |= SSICR_BCKP;
	if (ssi->lrckp_fsync_fall)
		ssicr |= SSICR_LRCKP;

	/* Determine the clock divider */
	clk_ckdv = 0;
	div = ssi->audio_mck / bclk_rate;
	/* try to find a match */
	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
		if (ckdv[i] == div) {
			clk_ckdv = i;
			break;
		}
	}

	if (i == ARRAY_SIZE(ckdv)) {
		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
		return -EINVAL;
	}

	/*
	 * DWL: Data Word Length = 16 bits
	 * SWL: System Word Length = 32 bits
	 */
	ssicr |= SSICR_CKDV(clk_ckdv);
	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR,
			  (SSIFCR_AUCKE | SSIFCR_TFRST | SSIFCR_RFRST));

	return 0;
}

static void rz_ssi_set_idle(struct rz_ssi_priv *ssi)
{
	int timeout;

	/* Disable irqs */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
			     SSICR_RUIEN | SSICR_ROIEN, 0);
	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	/* Wait for idle */
	timeout = 100;
	while (--timeout) {
		if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ)
			break;
		udelay(1);
	}

	if (!timeout)
		dev_info(ssi->dev, "timeout waiting for SSI idle\n");

	/* Hold FIFOs in reset */
	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0,
			     SSIFCR_TFRST | SSIFCR_RFRST);
}

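/*
 * Enable transmission and/or reception.  If the opposite direction is
 * already running (full duplex), both directions are briefly disabled,
 * the SSI is idled, and the FIFOs are taken out of reset before TEN and
 * REN are re-enabled together.
 */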
static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	bool is_play = rz_ssi_stream_is_play(ssi, strm->substream);
	bool is_full_duplex;
	u32 ssicr, ssifcr;

	is_full_duplex = rz_ssi_is_stream_running(&ssi->playback) ||
			 rz_ssi_is_stream_running(&ssi->capture);
	ssicr = rz_ssi_reg_readl(ssi, SSICR);
	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR);
	if (!is_full_duplex) {
		ssifcr &= ~0xF;
	} else {
		rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);
		rz_ssi_set_idle(ssi);
		ssifcr &= ~SSIFCR_FIFO_RST;
	}

	/* FIFO interrupt thresholds */
	if (rz_ssi_is_dma_enabled(ssi))
		rz_ssi_reg_writel(ssi, SSISCR, 0);
	else
		rz_ssi_reg_writel(ssi, SSISCR,
				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
				  SSISCR_RDFS(0));

	/* enable IRQ */
	if (is_play) {
		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
		ssifcr |= SSIFCR_TIE;
		if (!is_full_duplex)
			ssifcr |= SSIFCR_RFRST;
	} else {
		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
		ssifcr |= SSIFCR_RIE;
		if (!is_full_duplex)
			ssifcr |= SSIFCR_TFRST;
	}

	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	strm->running = 1;
	if (is_full_duplex)
		ssicr |= SSICR_TEN | SSICR_REN;
	else
		ssicr |= is_play ? SSICR_TEN : SSICR_REN;

	rz_ssi_reg_writel(ssi, SSICR, ssicr);

	return 0;
}

static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	strm->running = 0;

	if (rz_ssi_is_stream_running(&ssi->playback) ||
	    rz_ssi_is_stream_running(&ssi->capture))
		return 0;

	/* Disable TX/RX */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);

	/* Cancel all remaining DMA transactions */
	if (rz_ssi_is_dma_enabled(ssi))
		dmaengine_terminate_async(strm->dma_ch);

	rz_ssi_set_idle(ssi);

	return 0;
}

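/*
 * Advance the PCM position by the given number of frames, wrap at the end
 * of the ring buffer, and notify ALSA whenever a period boundary is crossed.
 */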
static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	int current_period;

	if (!strm->running || !substream || !substream->runtime)
		return;

	runtime = substream->runtime;
	strm->buffer_pos += frames;
	WARN_ON(strm->buffer_pos > runtime->buffer_size);

	/* ring buffer */
	if (strm->buffer_pos == runtime->buffer_size)
		strm->buffer_pos = 0;

	current_period = strm->buffer_pos / runtime->period_size;
	if (strm->period_counter != current_period) {
		snd_pcm_period_elapsed(strm->substream);
		strm->period_counter = current_period;
	}
}

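/*
 * PIO receive: drain whole frames from the RX FIFO into the runtime DMA
 * area.  Samples are 16 bits wide and sit in the upper half of SSIFRDR.
 */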
static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	u16 *buf;
	int fifo_samples;
	int frames_left;
	int samples;
	int i;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;

	do {
		/* frames left in this period */
		frames_left = runtime->period_size -
			      (strm->buffer_pos % runtime->period_size);
		if (!frames_left)
			frames_left = runtime->period_size;

		/* Samples in RX FIFO */
		fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
				SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;

		/* Only read full frames at a time */
		samples = 0;
		while (frames_left && (fifo_samples >= runtime->channels)) {
			samples += runtime->channels;
			fifo_samples -= runtime->channels;
			frames_left--;
		}

		/* not enough samples yet */
		if (!samples)
			break;

		/* calculate new buffer index */
		buf = (u16 *)runtime->dma_area;
		buf += strm->buffer_pos * runtime->channels;

		/* Note, only supports 16-bit samples */
		for (i = 0; i < samples; i++)
			*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);

		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
		rz_ssi_pointer_update(strm, samples / runtime->channels);
	} while (!frames_left && fifo_samples >= runtime->channels);

	return 0;
}

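/*
 * PIO transmit: copy as many whole frames as fit into the free TX FIFO
 * space, writing 16-bit samples into the upper half of SSIFTDR.
 */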
static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime = substream->runtime;
	int sample_space;
	int samples = 0;
	int frames_left;
	int i;
	u32 ssifsr;
	u16 *buf;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	/* frames left in this period */
	frames_left = runtime->period_size - (strm->buffer_pos %
					      runtime->period_size);
	if (frames_left == 0)
		frames_left = runtime->period_size;

	sample_space = strm->fifo_sample_size;
	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;

	/* Only add full frames at a time */
	while (frames_left && (sample_space >= runtime->channels)) {
		samples += runtime->channels;
		sample_space -= runtime->channels;
		frames_left--;
	}

	/* no space to send anything right now */
	if (samples == 0)
		return 0;

	/* calculate new buffer index */
	buf = (u16 *)(runtime->dma_area);
	buf += strm->buffer_pos * runtime->channels;

	/* Note, only supports 16-bit samples */
	for (i = 0; i < samples; i++)
		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));

	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
	rz_ssi_pointer_update(strm, samples / runtime->channels);

	return 0;
}

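/*
 * Interrupt handler shared by all SSI interrupt lines: irq_int reports
 * errors/idle (underrun/overrun recovery by stopping, clearing the flags,
 * refilling and restarting), irq_tx/irq_rx service PIO transfers, and
 * irq_rt is used when a single combined interrupt line is provided.
 */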
static irqreturn_t rz_ssi_interrupt(int irq, void *data)
{
	struct rz_ssi_stream *strm_playback = NULL;
	struct rz_ssi_stream *strm_capture = NULL;
	struct rz_ssi_priv *ssi = data;
	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);

	if (ssi->playback.substream)
		strm_playback = &ssi->playback;
	if (ssi->capture.substream)
		strm_capture = &ssi->capture;

	if (!strm_playback && !strm_capture)
		return IRQ_HANDLED; /* Left over TX/RX interrupt */

	if (irq == ssi->irq_int) { /* error or idle */
		bool is_stopped = false;
		int i, count;

		if (rz_ssi_is_dma_enabled(ssi))
			count = 4;
		else
			count = 1;

		if (ssisr & (SSISR_RUIRQ | SSISR_ROIRQ | SSISR_TUIRQ | SSISR_TOIRQ))
			is_stopped = true;

		if (ssi->capture.substream && is_stopped) {
			if (ssisr & SSISR_RUIRQ)
				strm_capture->uerr_num++;
			if (ssisr & SSISR_ROIRQ)
				strm_capture->oerr_num++;

			rz_ssi_stop(ssi, strm_capture);
		}

		if (ssi->playback.substream && is_stopped) {
			if (ssisr & SSISR_TUIRQ)
				strm_playback->uerr_num++;
			if (ssisr & SSISR_TOIRQ)
				strm_playback->oerr_num++;

			rz_ssi_stop(ssi, strm_playback);
		}

		/* Clear all flags */
		rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ | SSISR_TUIRQ |
				     SSISR_ROIRQ | SSISR_RUIRQ, 0);

		/* Add/remove more data */
		if (ssi->capture.substream && is_stopped) {
			for (i = 0; i < count; i++)
				strm_capture->transfer(ssi, strm_capture);
		}

		if (ssi->playback.substream && is_stopped) {
			for (i = 0; i < count; i++)
				strm_playback->transfer(ssi, strm_playback);
		}

		/* Resume */
		if (ssi->playback.substream && is_stopped)
			rz_ssi_start(ssi, &ssi->playback);
		if (ssi->capture.substream && is_stopped)
			rz_ssi_start(ssi, &ssi->capture);
	}

	if (!rz_ssi_is_stream_running(&ssi->playback) &&
	    !rz_ssi_is_stream_running(&ssi->capture))
		return IRQ_HANDLED;

	/* tx data empty */
	if (irq == ssi->irq_tx && rz_ssi_is_stream_running(&ssi->playback))
		strm_playback->transfer(ssi, &ssi->playback);

	/* rx data full */
	if (irq == ssi->irq_rx && rz_ssi_is_stream_running(&ssi->capture)) {
		strm_capture->transfer(ssi, &ssi->capture);
		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
	}

	if (irq == ssi->irq_rt) {
		if (ssi->playback.substream) {
			strm_playback->transfer(ssi, &ssi->playback);
		} else {
			strm_capture->transfer(ssi, &ssi->capture);
			rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
		}
	}

	return IRQ_HANDLED;
}

static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
				   struct dma_chan *dma_ch, bool is_play)
{
	struct dma_slave_config cfg;

	memset(&cfg, 0, sizeof(cfg));

	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
	cfg.dst_addr = ssi->phys + SSIFTDR;
	cfg.src_addr = ssi->phys + SSIFRDR;
	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;

	return dmaengine_slave_config(dma_ch, &cfg);
}

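/*
 * Queue one period-sized DMA transfer starting at dma_buffer_pos in the
 * ring buffer; the completion callback updates the PCM pointer and queues
 * the next period.
 */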
static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct dma_async_tx_descriptor *desc;
	struct snd_pcm_runtime *runtime;
	enum dma_transfer_direction dir;
	u32 dma_paddr, dma_size;
	int amount;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;
	if (runtime->state == SNDRV_PCM_STATE_DRAINING)
		/*
		 * The stream is ending, so do not queue up any more DMA
		 * transfers; otherwise we would play partial sound clips
		 * because we cannot shut off the DMA quickly enough.
		 */
		return 0;

	dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;

	/* Always transfer 1 period */
	amount = runtime->period_size;

	/* DMA physical address and size */
	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
							strm->dma_buffer_pos);
	dma_size = frames_to_bytes(runtime, amount);
	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
					   dir,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
		return -ENOMEM;
	}

	desc->callback = rz_ssi_dma_complete;
	desc->callback_param = strm;

	if (dmaengine_submit(desc) < 0) {
		dev_err(ssi->dev, "dmaengine_submit() fail\n");
		return -EIO;
	}

	/* Update DMA pointer */
	strm->dma_buffer_pos += amount;
	if (strm->dma_buffer_pos >= runtime->buffer_size)
		strm->dma_buffer_pos = 0;

	/* Start DMA */
	dma_async_issue_pending(strm->dma_ch);

	return 0;
}

static void rz_ssi_dma_complete(void *data)
{
	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;

	if (!strm->running || !strm->substream || !strm->substream->runtime)
		return;

	/* Note that next DMA transaction has probably already started */
	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);

	/* Queue up another DMA transaction */
	rz_ssi_dma_transfer(strm->priv, strm);
}

static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
{
	if (ssi->playback.dma_ch) {
		dma_release_channel(ssi->playback.dma_ch);
		ssi->playback.dma_ch = NULL;
		if (ssi->dma_rt)
			ssi->dma_rt = false;
	}

	if (ssi->capture.dma_ch) {
		dma_release_channel(ssi->capture.dma_ch);
		ssi->capture.dma_ch = NULL;
	}
}

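/*
 * Request DMA channels: prefer dedicated "tx" and "rx" channels and fall
 * back to a single shared "rt" channel.  Returns -ENODEV when no usable
 * channel is found so the caller can fall back to PIO.
 */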
static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
{
	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
	if (IS_ERR(ssi->playback.dma_ch))
		ssi->playback.dma_ch = NULL;

	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
	if (IS_ERR(ssi->capture.dma_ch))
		ssi->capture.dma_ch = NULL;

	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
		if (IS_ERR(ssi->playback.dma_ch)) {
			ssi->playback.dma_ch = NULL;
			goto no_dma;
		}

		ssi->dma_rt = true;
	}

	if (!rz_ssi_is_dma_enabled(ssi))
		goto no_dma;

	if (ssi->playback.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
		goto no_dma;

	if (ssi->capture.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
		goto no_dma;

	return 0;

no_dma:
	rz_ssi_release_dma_channels(ssi);

	return -ENODEV;
}

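/*
 * START: soft-reset the SSI if nothing is running yet, prime the transfer
 * path (one PIO transfer, or four queued DMA descriptors), then enable the
 * stream with rz_ssi_start().  STOP: stop the stream and report any
 * under/overruns via rz_ssi_stream_quit().
 */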
static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
			      struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
	int ret = 0, i, num_transfer = 1;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		/* Soft Reset */
		if (!rz_ssi_is_stream_running(&ssi->playback) &&
		    !rz_ssi_is_stream_running(&ssi->capture)) {
			rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
			rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
			udelay(5);
		}

		rz_ssi_stream_init(strm, substream);

		if (ssi->dma_rt) {
			bool is_playback;

			is_playback = rz_ssi_stream_is_play(ssi, substream);
			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
						      is_playback);
			/* Fallback to pio */
			if (ret < 0) {
				ssi->playback.transfer = rz_ssi_pio_send;
				ssi->capture.transfer = rz_ssi_pio_recv;
				rz_ssi_release_dma_channels(ssi);
			}
		}

		/* For DMA, queue up multiple DMA descriptors */
		if (rz_ssi_is_dma_enabled(ssi))
			num_transfer = 4;

		for (i = 0; i < num_transfer; i++) {
			ret = strm->transfer(ssi, strm);
			if (ret)
				goto done;
		}

		ret = rz_ssi_start(ssi, strm);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
		rz_ssi_stop(ssi, strm);
		rz_ssi_stream_quit(ssi, strm);
		break;
	}

done:
	return ret;
}

static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);

	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
	case SND_SOC_DAIFMT_BP_FP:
		break;
	default:
		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
		return -EINVAL;
	}

	/*
	 * set clock polarity
	 *
	 * "normal" BCLK = Signal is available at rising edge of BCLK
	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
	 */
	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
	case SND_SOC_DAIFMT_NB_NF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_NB_IF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = true;
		break;
	case SND_SOC_DAIFMT_IB_NF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_IB_IF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = true;
		break;
	default:
		return -EINVAL;
	}

	/* only i2s support */
	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
	case SND_SOC_DAIFMT_I2S:
		break;
	default:
		dev_err(ssi->dev, "Only I2S mode is supported.\n");
		return -EINVAL;
	}

	return 0;
}

static bool rz_ssi_is_valid_hw_params(struct rz_ssi_priv *ssi, unsigned int rate,
				      unsigned int channels,
				      unsigned int sample_width,
				      unsigned int sample_bits)
{
	if (ssi->hw_params_cache.rate != rate ||
	    ssi->hw_params_cache.channels != channels ||
	    ssi->hw_params_cache.sample_width != sample_width ||
	    ssi->hw_params_cache.sample_bits != sample_bits)
		return false;

	return true;
}

static void rz_ssi_cache_hw_params(struct rz_ssi_priv *ssi, unsigned int rate,
				   unsigned int channels,
				   unsigned int sample_width,
				   unsigned int sample_bits)
{
	ssi->hw_params_cache.rate = rate;
	ssi->hw_params_cache.channels = channels;
	ssi->hw_params_cache.sample_width = sample_width;
	ssi->hw_params_cache.sample_bits = sample_bits;
}

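/*
 * With one direction already running, the second direction must use the
 * same rate, channel count and sample width; otherwise the request is
 * rejected instead of reprogramming the shared clock setup.
 */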
static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
				struct snd_pcm_hw_params *params,
				struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
	unsigned int sample_bits = hw_param_interval(params,
					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
	unsigned int channels = params_channels(params);
	unsigned int rate = params_rate(params);

	if (sample_bits != 16) {
		dev_err(ssi->dev, "Unsupported sample width: %d\n",
			sample_bits);
		return -EINVAL;
	}

	if (channels != 2) {
		dev_err(ssi->dev, "Number of channels not matched: %d\n",
			channels);
		return -EINVAL;
	}

	if (rz_ssi_is_stream_running(&ssi->playback) ||
	    rz_ssi_is_stream_running(&ssi->capture)) {
		if (rz_ssi_is_valid_hw_params(ssi, rate, channels,
					      strm->sample_width, sample_bits))
			return 0;

		dev_err(ssi->dev, "Full duplex needs same HW params\n");
		return -EINVAL;
	}

	rz_ssi_cache_hw_params(ssi, rate, channels, strm->sample_width,
			       sample_bits);

	return rz_ssi_clk_setup(ssi, rate, channels);
}

static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
	.trigger	= rz_ssi_dai_trigger,
	.set_fmt	= rz_ssi_dai_set_fmt,
	.hw_params	= rz_ssi_dai_hw_params,
};

static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
				  SNDRV_PCM_INFO_MMAP		|
				  SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max	= PREALLOC_BUFFER,
	.period_bytes_min	= 32,
	.period_bytes_max	= 8192,
	.channels_min		= SSI_CHAN_MIN,
	.channels_max		= SSI_CHAN_MAX,
	.periods_min		= 1,
	.periods_max		= 32,
	.fifo_size		= 32 * 2,
};

static int rz_ssi_pcm_open(struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);

	return snd_pcm_hw_constraint_integer(substream->runtime,
					     SNDRV_PCM_HW_PARAM_PERIODS);
}

static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
					    struct snd_pcm_substream *substream)
{
	struct snd_soc_dai *dai = rz_ssi_get_dai(substream);
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);

	return strm->buffer_pos;
}

static int rz_ssi_pcm_new(struct snd_soc_component *component,
			  struct snd_soc_pcm_runtime *rtd)
{
	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
				       rtd->card->snd_card->dev,
				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
	return 0;
}

static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
	{
		.name			= "rz-ssi-dai",
		.playback = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.capture = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.ops = &rz_ssi_dai_ops,
	},
};

static const struct snd_soc_component_driver rz_ssi_soc_component = {
	.name			= "rz-ssi",
	.open			= rz_ssi_pcm_open,
	.pointer		= rz_ssi_pcm_pointer,
	.pcm_construct		= rz_ssi_pcm_new,
	.legacy_dai_naming	= 1,
};

static int rz_ssi_probe(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi;
	struct clk *audio_clk;
	struct resource *res;
	int ret;

	ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL);
	if (!ssi)
		return -ENOMEM;

	ssi->pdev = pdev;
	ssi->dev = &pdev->dev;
	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(ssi->base))
		return PTR_ERR(ssi->base);

	ssi->phys = res->start;
	ssi->clk = devm_clk_get(&pdev->dev, "ssi");
	if (IS_ERR(ssi->clk))
		return PTR_ERR(ssi->clk);

	ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr");
	if (IS_ERR(ssi->sfr_clk))
		return PTR_ERR(ssi->sfr_clk);

	audio_clk = devm_clk_get(&pdev->dev, "audio_clk1");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk1");

	ssi->audio_clk_1 = clk_get_rate(audio_clk);
	audio_clk = devm_clk_get(&pdev->dev, "audio_clk2");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk2");

	ssi->audio_clk_2 = clk_get_rate(audio_clk);
	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
		return dev_err_probe(&pdev->dev, -EINVAL,
				     "no audio clk1 or audio clk2");

	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;

	/* Detect DMA support */
	ret = rz_ssi_dma_request(ssi, &pdev->dev);
	if (ret < 0) {
		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
		ssi->playback.transfer = rz_ssi_pio_send;
		ssi->capture.transfer = rz_ssi_pio_recv;
	} else {
		dev_info(&pdev->dev, "DMA enabled");
		ssi->playback.transfer = rz_ssi_dma_transfer;
		ssi->capture.transfer = rz_ssi_dma_transfer;
	}

	ssi->playback.priv = ssi;
	ssi->capture.priv = ssi;

	spin_lock_init(&ssi->lock);
	dev_set_drvdata(&pdev->dev, ssi);

	/* Error Interrupt */
	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
	if (ssi->irq_int < 0) {
		rz_ssi_release_dma_channels(ssi);
		return ssi->irq_int;
	}

	ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt,
			       0, dev_name(&pdev->dev), ssi);
	if (ret < 0) {
		rz_ssi_release_dma_channels(ssi);
		return dev_err_probe(&pdev->dev, ret,
				     "irq request error (int_req)\n");
	}

	if (!rz_ssi_is_dma_enabled(ssi)) {
		/* Tx and Rx interrupts (pio only) */
		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
		if (ssi->irq_tx == -ENXIO && ssi->irq_rx == -ENXIO) {
			ssi->irq_rt = platform_get_irq_byname(pdev, "dma_rt");
			if (ssi->irq_rt < 0)
				return ssi->irq_rt;

			ret = devm_request_irq(&pdev->dev, ssi->irq_rt,
					       &rz_ssi_interrupt, 0,
					       dev_name(&pdev->dev), ssi);
			if (ret < 0)
				return dev_err_probe(&pdev->dev, ret,
						     "irq request error (dma_rt)\n");
		} else {
			if (ssi->irq_tx < 0)
				return ssi->irq_tx;

			if (ssi->irq_rx < 0)
				return ssi->irq_rx;

			ret = devm_request_irq(&pdev->dev, ssi->irq_tx,
					       &rz_ssi_interrupt, 0,
					       dev_name(&pdev->dev), ssi);
			if (ret < 0)
				return dev_err_probe(&pdev->dev, ret,
						     "irq request error (dma_tx)\n");

			ret = devm_request_irq(&pdev->dev, ssi->irq_rx,
					       &rz_ssi_interrupt, 0,
					       dev_name(&pdev->dev), ssi);
			if (ret < 0)
				return dev_err_probe(&pdev->dev, ret,
						     "irq request error (dma_rx)\n");
		}
	}

	ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
	if (IS_ERR(ssi->rstc)) {
		ret = PTR_ERR(ssi->rstc);
		goto err_reset;
	}

	reset_control_deassert(ssi->rstc);
	pm_runtime_enable(&pdev->dev);
	ret = pm_runtime_resume_and_get(&pdev->dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "pm_runtime_resume_and_get failed\n");
		goto err_pm;
	}

	ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component,
					      rz_ssi_soc_dai,
					      ARRAY_SIZE(rz_ssi_soc_dai));
	if (ret < 0) {
		dev_err(&pdev->dev, "failed to register snd component\n");
		goto err_snd_soc;
	}

	return 0;

err_snd_soc:
	pm_runtime_put(ssi->dev);
err_pm:
	pm_runtime_disable(ssi->dev);
	reset_control_assert(ssi->rstc);
err_reset:
	rz_ssi_release_dma_channels(ssi);

	return ret;
}

static void rz_ssi_remove(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);

	rz_ssi_release_dma_channels(ssi);

	pm_runtime_put(ssi->dev);
	pm_runtime_disable(ssi->dev);
	reset_control_assert(ssi->rstc);
}

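/*
 * Illustrative device tree fragment, inferred only from the resource names
 * this driver looks up (the renesas,rz-ssi dt-bindings document is
 * authoritative; specifiers are omitted and the unit address is a
 * placeholder):
 *
 *	ssi0: ssi@10049c00 {
 *		compatible = "renesas,rz-ssi";
 *		interrupt-names = "int_req", "dma_rx", "dma_tx";
 *		clock-names = "ssi", "ssi_sfr", "audio_clk1", "audio_clk2";
 *		dma-names = "tx", "rx";
 *	};
 */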
static const struct of_device_id rz_ssi_of_match[] = {
	{ .compatible = "renesas,rz-ssi", },
	{/* Sentinel */},
};
MODULE_DEVICE_TABLE(of, rz_ssi_of_match);

static struct platform_driver rz_ssi_driver = {
	.driver	= {
		.name	= "rz-ssi-pcm-audio",
		.of_match_table = rz_ssi_of_match,
	},
	.probe		= rz_ssi_probe,
	.remove		= rz_ssi_remove,
};

module_platform_driver(rz_ssi_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");