// SPDX-License-Identifier: GPL-2.0
//
// Renesas RZ/G2L ASoC Serial Sound Interface (SSIF-2) Driver
//
// Copyright (C) 2021 Renesas Electronics Corp.
// Copyright (C) 2019 Chris Brandt.
//

#include <linux/clk.h>
#include <linux/dmaengine.h>
#include <linux/io.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <sound/soc.h>

/* REGISTER OFFSET */
#define SSICR			0x000
#define SSISR			0x004
#define SSIFCR			0x010
#define SSIFSR			0x014
#define SSIFTDR			0x018
#define SSIFRDR			0x01c
#define SSIOFR			0x020
#define SSISCR			0x024

/* SSI REGISTER BITS */
#define SSICR_DWL(x)		(((x) & 0x7) << 19)
#define SSICR_SWL(x)		(((x) & 0x7) << 16)

#define SSICR_CKS		BIT(30)
#define SSICR_TUIEN		BIT(29)
#define SSICR_TOIEN		BIT(28)
#define SSICR_RUIEN		BIT(27)
#define SSICR_ROIEN		BIT(26)
#define SSICR_MST		BIT(14)
#define SSICR_BCKP		BIT(13)
#define SSICR_LRCKP		BIT(12)
#define SSICR_CKDV(x)		(((x) & 0xf) << 4)
#define SSICR_TEN		BIT(1)
#define SSICR_REN		BIT(0)

#define SSISR_TUIRQ		BIT(29)
#define SSISR_TOIRQ		BIT(28)
#define SSISR_RUIRQ		BIT(27)
#define SSISR_ROIRQ		BIT(26)
#define SSISR_IIRQ		BIT(25)

#define SSIFCR_AUCKE		BIT(31)
#define SSIFCR_SSIRST		BIT(16)
#define SSIFCR_TIE		BIT(3)
#define SSIFCR_RIE		BIT(2)
#define SSIFCR_TFRST		BIT(1)
#define SSIFCR_RFRST		BIT(0)

#define SSIFSR_TDC_MASK		0x3f
#define SSIFSR_TDC_SHIFT	24
#define SSIFSR_RDC_MASK		0x3f
#define SSIFSR_RDC_SHIFT	8

#define SSIFSR_TDC(x)		(((x) & 0x1f) << 24)
#define SSIFSR_TDE		BIT(16)
#define SSIFSR_RDC(x)		(((x) & 0x1f) << 8)
#define SSIFSR_RDF		BIT(0)

#define SSIOFR_LRCONT		BIT(8)

#define SSISCR_TDES(x)		(((x) & 0x1f) << 8)
#define SSISCR_RDFS(x)		(((x) & 0x1f) << 0)

/* Preallocated buffer sizes */
#define PREALLOC_BUFFER		(SZ_32K)
#define PREALLOC_BUFFER_MAX	(SZ_32K)

#define SSI_RATES		SNDRV_PCM_RATE_8000_48000 /* 8 kHz to 48 kHz */
#define SSI_FMTS		SNDRV_PCM_FMTBIT_S16_LE
#define SSI_CHAN_MIN		2
#define SSI_CHAN_MAX		2
#define SSI_FIFO_DEPTH		32

struct rz_ssi_priv;

struct rz_ssi_stream {
	struct rz_ssi_priv *priv;
	struct snd_pcm_substream *substream;
	int fifo_sample_size;	/* sample capacity of SSI FIFO */
	int dma_buffer_pos;	/* frame position of the next DMA descriptor */
	int period_counter;	/* for keeping track of periods transferred */
	int sample_width;
	int buffer_pos;		/* current frame position in the buffer */
	int running;		/* 0=stopped, 1=running */

	int uerr_num;
	int oerr_num;

	struct dma_chan *dma_ch;

	int (*transfer)(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm);
};

struct rz_ssi_priv {
	void __iomem *base;
	struct platform_device *pdev;
	struct reset_control *rstc;
	struct device *dev;
	struct clk *sfr_clk;
	struct clk *clk;

	phys_addr_t phys;
	int irq_int;
	int irq_tx;
	int irq_rx;

	spinlock_t lock;

	/*
	 * The SSI supports full-duplex transmission and reception.
	 * However, if an error occurs, a channel reset (both transmission
	 * and reception reset) is required.
	 * So it is better to use it as half-duplex (playback and capture
	 * should be done on separate channels).
	 */
	struct rz_ssi_stream playback;
	struct rz_ssi_stream capture;

	/* clock */
	unsigned long audio_mck;
	unsigned long audio_clk_1;
	unsigned long audio_clk_2;

	bool lrckp_fsync_fall;	/* LR clock polarity (SSICR.LRCKP) */
	bool bckp_rise;	/* Bit clock polarity (SSICR.BCKP) */
	bool dma_rt;
};

static void rz_ssi_dma_complete(void *data);

static void rz_ssi_reg_writel(struct rz_ssi_priv *priv, uint reg, u32 data)
{
	writel(data, (priv->base + reg));
}

static u32 rz_ssi_reg_readl(struct rz_ssi_priv *priv, uint reg)
{
	return readl(priv->base + reg);
}

static void rz_ssi_reg_mask_setl(struct rz_ssi_priv *priv, uint reg,
				 u32 bclr, u32 bset)
{
	u32 val;

	val = readl(priv->base + reg);
	val = (val & ~bclr) | bset;
	writel(val, (priv->base + reg));
}

static inline struct snd_soc_dai *
rz_ssi_get_dai(struct snd_pcm_substream *substream)
{
	struct snd_soc_pcm_runtime *rtd = asoc_substream_to_rtd(substream);

	return asoc_rtd_to_cpu(rtd, 0);
}

static inline bool rz_ssi_stream_is_play(struct rz_ssi_priv *ssi,
					 struct snd_pcm_substream *substream)
{
	return substream->stream == SNDRV_PCM_STREAM_PLAYBACK;
}

static inline struct rz_ssi_stream *
rz_ssi_stream_get(struct rz_ssi_priv *ssi, struct snd_pcm_substream *substream)
{
	struct rz_ssi_stream *stream = &ssi->playback;

	if (substream->stream != SNDRV_PCM_STREAM_PLAYBACK)
		stream = &ssi->capture;

	return stream;
}

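/*
 * DMA is used only when a playback channel was obtained and either a
 * dedicated capture channel exists as well or a single shared "rt" channel
 * serves both directions (dma_rt). Otherwise the driver falls back to PIO.
 */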
static inline bool rz_ssi_is_dma_enabled(struct rz_ssi_priv *ssi)
{
	return (ssi->playback.dma_ch && (ssi->dma_rt || ssi->capture.dma_ch));
}

static void rz_ssi_set_substream(struct rz_ssi_stream *strm,
				 struct snd_pcm_substream *substream)
{
	struct rz_ssi_priv *ssi = strm->priv;
	unsigned long flags;

	spin_lock_irqsave(&ssi->lock, flags);
	strm->substream = substream;
	spin_unlock_irqrestore(&ssi->lock, flags);
}

static bool rz_ssi_stream_is_valid(struct rz_ssi_priv *ssi,
				   struct rz_ssi_stream *strm)
{
	unsigned long flags;
	bool ret;

	spin_lock_irqsave(&ssi->lock, flags);
	ret = strm->substream && strm->substream->runtime;
	spin_unlock_irqrestore(&ssi->lock, flags);

	return ret;
}

static void rz_ssi_stream_init(struct rz_ssi_stream *strm,
			       struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;

	rz_ssi_set_substream(strm, substream);
	strm->sample_width = samples_to_bytes(runtime, 1);
	strm->dma_buffer_pos = 0;
	strm->period_counter = 0;
	strm->buffer_pos = 0;

	strm->oerr_num = 0;
	strm->uerr_num = 0;
	strm->running = 0;

	/* fifo init */
	strm->fifo_sample_size = SSI_FIFO_DEPTH;
}

static void rz_ssi_stream_quit(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_soc_dai *dai = rz_ssi_get_dai(strm->substream);

	rz_ssi_set_substream(strm, NULL);

	if (strm->oerr_num > 0)
		dev_info(dai->dev, "overrun = %d\n", strm->oerr_num);

	if (strm->uerr_num > 0)
		dev_info(dai->dev, "underrun = %d\n", strm->uerr_num);
}

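/*
 * Set up the bit clock for master mode: the bit clock is
 * rate * channels * 32 (the system word length) and is generated by
 * dividing the selected external audio clock by one of the ratios in the
 * ckdv[] table below. When both audio clocks are available, AUDIO_CLK1 is
 * preferred and AUDIO_CLK2 is used only when AUDIO_CLK1 is not an integer
 * multiple of the required bit clock.
 */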
static int rz_ssi_clk_setup(struct rz_ssi_priv *ssi, unsigned int rate,
			    unsigned int channels)
{
	static s8 ckdv[16] = { 1,  2,  4,  8, 16, 32, 64, 128,
			       6, 12, 24, 48, 96, -1, -1, -1 };
	unsigned int channel_bits = 32;	/* System Word Length */
	unsigned long bclk_rate = rate * channels * channel_bits;
	unsigned int div;
	unsigned int i;
	u32 ssicr = 0;
	u32 clk_ckdv;

	/* Clear AUCKE so we can set MST */
	rz_ssi_reg_writel(ssi, SSIFCR, 0);

	/* Continue to output LRCK pin even when idle */
	rz_ssi_reg_writel(ssi, SSIOFR, SSIOFR_LRCONT);
	if (ssi->audio_clk_1 && ssi->audio_clk_2) {
		if (ssi->audio_clk_1 % bclk_rate)
			ssi->audio_mck = ssi->audio_clk_2;
		else
			ssi->audio_mck = ssi->audio_clk_1;
	}

	/* Clock setting */
	ssicr |= SSICR_MST;
	if (ssi->audio_mck == ssi->audio_clk_1)
		ssicr |= SSICR_CKS;
	if (ssi->bckp_rise)
		ssicr |= SSICR_BCKP;
	if (ssi->lrckp_fsync_fall)
		ssicr |= SSICR_LRCKP;

	/* Determine the clock divider */
	clk_ckdv = 0;
	div = ssi->audio_mck / bclk_rate;
	/* try to find a match */
	for (i = 0; i < ARRAY_SIZE(ckdv); i++) {
		if (ckdv[i] == div) {
			clk_ckdv = i;
			break;
		}
	}

	if (i == ARRAY_SIZE(ckdv)) {
		dev_err(ssi->dev, "Rate not divisible by audio clock source\n");
		return -EINVAL;
	}

	/*
	 * DWL: Data Word Length = 16 bits
	 * SWL: System Word Length = 32 bits
	 */
	ssicr |= SSICR_CKDV(clk_ckdv);
	ssicr |= SSICR_DWL(1) | SSICR_SWL(3);
	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR,
			  (SSIFCR_AUCKE | SSIFCR_TFRST | SSIFCR_RFRST));

	return 0;
}

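/*
 * Start transmission or reception: program the FIFO thresholds (TDES at
 * half the FIFO and RDFS at 0 for PIO, both 0 when DMA is used), enable
 * the under/overrun error interrupts plus the data interrupt for the
 * active direction while keeping the unused direction's FIFO in reset,
 * clear any stale error flags and finally set TEN or REN.
 */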
static int rz_ssi_start(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	bool is_play = rz_ssi_stream_is_play(ssi, strm->substream);
	u32 ssicr, ssifcr;

	ssicr = rz_ssi_reg_readl(ssi, SSICR);
	ssifcr = rz_ssi_reg_readl(ssi, SSIFCR) & ~0xF;

	/* FIFO interrupt thresholds */
	if (rz_ssi_is_dma_enabled(ssi))
		rz_ssi_reg_writel(ssi, SSISCR, 0);
	else
		rz_ssi_reg_writel(ssi, SSISCR,
				  SSISCR_TDES(strm->fifo_sample_size / 2 - 1) |
				  SSISCR_RDFS(0));

	/* enable IRQ */
	if (is_play) {
		ssicr |= SSICR_TUIEN | SSICR_TOIEN;
		ssifcr |= SSIFCR_TIE | SSIFCR_RFRST;
	} else {
		ssicr |= SSICR_RUIEN | SSICR_ROIEN;
		ssifcr |= SSIFCR_RIE | SSIFCR_TFRST;
	}

	rz_ssi_reg_writel(ssi, SSICR, ssicr);
	rz_ssi_reg_writel(ssi, SSIFCR, ssifcr);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	strm->running = 1;
	ssicr |= is_play ? SSICR_TEN : SSICR_REN;
	rz_ssi_reg_writel(ssi, SSICR, ssicr);

	return 0;
}

static int rz_ssi_stop(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	int timeout;

	strm->running = 0;

	/* Disable TX/RX */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TEN | SSICR_REN, 0);

	/* Cancel all remaining DMA transactions */
	if (rz_ssi_is_dma_enabled(ssi))
		dmaengine_terminate_async(strm->dma_ch);

	/* Disable irqs */
	rz_ssi_reg_mask_setl(ssi, SSICR, SSICR_TUIEN | SSICR_TOIEN |
			     SSICR_RUIEN | SSICR_ROIEN, 0);
	rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_TIE | SSIFCR_RIE, 0);

	/* Clear all error flags */
	rz_ssi_reg_mask_setl(ssi, SSISR,
			     (SSISR_TOIRQ | SSISR_TUIRQ | SSISR_ROIRQ |
			      SSISR_RUIRQ), 0);

	/* Wait for idle */
	timeout = 100;
	while (--timeout) {
		if (rz_ssi_reg_readl(ssi, SSISR) & SSISR_IIRQ)
			break;
		udelay(1);
	}

	if (!timeout)
		dev_info(ssi->dev, "timeout waiting for SSI idle\n");

	/* Hold FIFOs in reset */
	rz_ssi_reg_mask_setl(ssi, SSIFCR, 0,
			     SSIFCR_TFRST | SSIFCR_RFRST);

	return 0;
}

static void rz_ssi_pointer_update(struct rz_ssi_stream *strm, int frames)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	int current_period;

	if (!strm->running || !substream || !substream->runtime)
		return;

	runtime = substream->runtime;
	strm->buffer_pos += frames;
	WARN_ON(strm->buffer_pos > runtime->buffer_size);

	/* ring buffer */
	if (strm->buffer_pos == runtime->buffer_size)
		strm->buffer_pos = 0;

	current_period = strm->buffer_pos / runtime->period_size;
	if (strm->period_counter != current_period) {
		snd_pcm_period_elapsed(strm->substream);
		strm->period_counter = current_period;
	}
}

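/*
 * PIO receive: drain complete frames from the RX FIFO into the PCM buffer.
 * Only 16-bit data words are supported, so each sample is taken from the
 * upper 16 bits of SSIFRDR; the PCM pointer is advanced by the number of
 * frames copied.
 */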
static int rz_ssi_pio_recv(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime;
	u16 *buf;
	int fifo_samples;
	int frames_left;
	int samples;
	int i;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;

	do {
		/* frames left in this period */
		frames_left = runtime->period_size -
			      (strm->buffer_pos % runtime->period_size);
		if (!frames_left)
			frames_left = runtime->period_size;

		/* Samples in RX FIFO */
		fifo_samples = (rz_ssi_reg_readl(ssi, SSIFSR) >>
				SSIFSR_RDC_SHIFT) & SSIFSR_RDC_MASK;

		/* Only read full frames at a time */
		samples = 0;
		while (frames_left && (fifo_samples >= runtime->channels)) {
			samples += runtime->channels;
			fifo_samples -= runtime->channels;
			frames_left--;
		}

		/* not enough samples yet */
		if (!samples)
			break;

		/* calculate new buffer index */
		buf = (u16 *)runtime->dma_area;
		buf += strm->buffer_pos * runtime->channels;

		/* Note, only supports 16-bit samples */
		for (i = 0; i < samples; i++)
			*buf++ = (u16)(rz_ssi_reg_readl(ssi, SSIFRDR) >> 16);

		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
		rz_ssi_pointer_update(strm, samples / runtime->channels);
	} while (!frames_left && fifo_samples >= runtime->channels);

	return 0;
}

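/*
 * PIO transmit: copy as many complete frames as fit into the free space of
 * the TX FIFO, writing each 16-bit sample into the upper 16 bits of
 * SSIFTDR, then advance the PCM pointer accordingly.
 */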
static int rz_ssi_pio_send(struct rz_ssi_priv *ssi, struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct snd_pcm_runtime *runtime = substream->runtime;
	int sample_space;
	int samples = 0;
	int frames_left;
	int i;
	u32 ssifsr;
	u16 *buf;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	/* frames left in this period */
	frames_left = runtime->period_size - (strm->buffer_pos %
					      runtime->period_size);
	if (frames_left == 0)
		frames_left = runtime->period_size;

	sample_space = strm->fifo_sample_size;
	ssifsr = rz_ssi_reg_readl(ssi, SSIFSR);
	sample_space -= (ssifsr >> SSIFSR_TDC_SHIFT) & SSIFSR_TDC_MASK;

	/* Only add full frames at a time */
	while (frames_left && (sample_space >= runtime->channels)) {
		samples += runtime->channels;
		sample_space -= runtime->channels;
		frames_left--;
	}

	/* no space to send anything right now */
	if (samples == 0)
		return 0;

	/* calculate new buffer index */
	buf = (u16 *)(runtime->dma_area);
	buf += strm->buffer_pos * runtime->channels;

	/* Note, only supports 16-bit samples */
	for (i = 0; i < samples; i++)
		rz_ssi_reg_writel(ssi, SSIFTDR, ((u32)(*buf++) << 16));

	rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_TDE, 0);
	rz_ssi_pointer_update(strm, samples / runtime->channels);

	return 0;
}

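/*
 * Shared handler for the error/idle interrupt (irq_int) and, in PIO mode,
 * the TX data empty (irq_tx) and RX data full (irq_rx) interrupts. On an
 * under- or overrun the channel is stopped, the error flags are cleared,
 * more data is transferred and the channel is restarted, as the hardware
 * requires a stop/restart after such an error.
 */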
static irqreturn_t rz_ssi_interrupt(int irq, void *data)
{
	struct rz_ssi_stream *strm = NULL;
	struct rz_ssi_priv *ssi = data;
	u32 ssisr = rz_ssi_reg_readl(ssi, SSISR);

	if (ssi->playback.substream)
		strm = &ssi->playback;
	else if (ssi->capture.substream)
		strm = &ssi->capture;
	else
		return IRQ_HANDLED; /* Left over TX/RX interrupt */

	if (irq == ssi->irq_int) { /* error or idle */
		if (ssisr & SSISR_TUIRQ)
			strm->uerr_num++;
		if (ssisr & SSISR_TOIRQ)
			strm->oerr_num++;
		if (ssisr & SSISR_RUIRQ)
			strm->uerr_num++;
		if (ssisr & SSISR_ROIRQ)
			strm->oerr_num++;

		if (ssisr & (SSISR_TUIRQ | SSISR_TOIRQ | SSISR_RUIRQ |
			     SSISR_ROIRQ)) {
			/* Error handling */
			/* You must reset (stop/restart) after each interrupt */
			rz_ssi_stop(ssi, strm);

			/* Clear all flags */
			rz_ssi_reg_mask_setl(ssi, SSISR, SSISR_TOIRQ |
					     SSISR_TUIRQ | SSISR_ROIRQ |
					     SSISR_RUIRQ, 0);

			/* Add/remove more data */
			strm->transfer(ssi, strm);

			/* Resume */
			rz_ssi_start(ssi, strm);
		}
	}

	if (!strm->running)
		return IRQ_HANDLED;

	/* tx data empty */
	if (irq == ssi->irq_tx)
		strm->transfer(ssi, &ssi->playback);

	/* rx data full */
	if (irq == ssi->irq_rx) {
		strm->transfer(ssi, &ssi->capture);
		rz_ssi_reg_mask_setl(ssi, SSIFSR, SSIFSR_RDF, 0);
	}

	return IRQ_HANDLED;
}

static int rz_ssi_dma_slave_config(struct rz_ssi_priv *ssi,
				   struct dma_chan *dma_ch, bool is_play)
{
	struct dma_slave_config cfg;

	memset(&cfg, 0, sizeof(cfg));

	cfg.direction = is_play ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;
	cfg.dst_addr = ssi->phys + SSIFTDR;
	cfg.src_addr = ssi->phys + SSIFRDR;
	cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
	cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;

	return dmaengine_slave_config(dma_ch, &cfg);
}

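/*
 * Queue one period of audio data as a single DMA transaction. The
 * completion callback (rz_ssi_dma_complete) advances the PCM pointer and
 * queues the next period, so transfers continue back-to-back until the
 * stream is stopped or enters the DRAINING state.
 */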
static int rz_ssi_dma_transfer(struct rz_ssi_priv *ssi,
			       struct rz_ssi_stream *strm)
{
	struct snd_pcm_substream *substream = strm->substream;
	struct dma_async_tx_descriptor *desc;
	struct snd_pcm_runtime *runtime;
	enum dma_transfer_direction dir;
	u32 dma_paddr, dma_size;
	int amount;

	if (!rz_ssi_stream_is_valid(ssi, strm))
		return -EINVAL;

	runtime = substream->runtime;
	if (runtime->status->state == SNDRV_PCM_STATE_DRAINING)
		/*
		 * Stream is ending, so do not queue up any more DMA
		 * transfers; otherwise we play partial sound clips
		 * because we can't shut off the DMA quickly enough.
		 */
		return 0;

	dir = rz_ssi_stream_is_play(ssi, substream) ? DMA_MEM_TO_DEV : DMA_DEV_TO_MEM;

	/* Always transfer 1 period */
	amount = runtime->period_size;

	/* DMA physical address and size */
	dma_paddr = runtime->dma_addr + frames_to_bytes(runtime,
							strm->dma_buffer_pos);
	dma_size = frames_to_bytes(runtime, amount);
	desc = dmaengine_prep_slave_single(strm->dma_ch, dma_paddr, dma_size,
					   dir,
					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_err(ssi->dev, "dmaengine_prep_slave_single() fail\n");
		return -ENOMEM;
	}

	desc->callback = rz_ssi_dma_complete;
	desc->callback_param = strm;

	if (dmaengine_submit(desc) < 0) {
		dev_err(ssi->dev, "dmaengine_submit() fail\n");
		return -EIO;
	}

	/* Update DMA pointer */
	strm->dma_buffer_pos += amount;
	if (strm->dma_buffer_pos >= runtime->buffer_size)
		strm->dma_buffer_pos = 0;

	/* Start DMA */
	dma_async_issue_pending(strm->dma_ch);

	return 0;
}

static void rz_ssi_dma_complete(void *data)
{
	struct rz_ssi_stream *strm = (struct rz_ssi_stream *)data;

	if (!strm->running || !strm->substream || !strm->substream->runtime)
		return;

	/* Note that the next DMA transaction has probably already started */
	rz_ssi_pointer_update(strm, strm->substream->runtime->period_size);

	/* Queue up another DMA transaction */
	rz_ssi_dma_transfer(strm->priv, strm);
}

static void rz_ssi_release_dma_channels(struct rz_ssi_priv *ssi)
{
	if (ssi->playback.dma_ch) {
		dma_release_channel(ssi->playback.dma_ch);
		ssi->playback.dma_ch = NULL;
		if (ssi->dma_rt)
			ssi->dma_rt = false;
	}

	if (ssi->capture.dma_ch) {
		dma_release_channel(ssi->capture.dma_ch);
		ssi->capture.dma_ch = NULL;
	}
}

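/*
 * Request DMA channels: try dedicated "tx" and "rx" channels first; if
 * neither is available, fall back to a single "rt" channel shared between
 * playback and capture (dma_rt), which is reconfigured per direction in the
 * trigger callback. Returns -ENODEV when no usable channel could be set up,
 * in which case the caller falls back to PIO.
 */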
static int rz_ssi_dma_request(struct rz_ssi_priv *ssi, struct device *dev)
{
	ssi->playback.dma_ch = dma_request_chan(dev, "tx");
	if (IS_ERR(ssi->playback.dma_ch))
		ssi->playback.dma_ch = NULL;

	ssi->capture.dma_ch = dma_request_chan(dev, "rx");
	if (IS_ERR(ssi->capture.dma_ch))
		ssi->capture.dma_ch = NULL;

	if (!ssi->playback.dma_ch && !ssi->capture.dma_ch) {
		ssi->playback.dma_ch = dma_request_chan(dev, "rt");
		if (IS_ERR(ssi->playback.dma_ch)) {
			ssi->playback.dma_ch = NULL;
			goto no_dma;
		}

		ssi->dma_rt = true;
	}

	if (!rz_ssi_is_dma_enabled(ssi))
		goto no_dma;

	if (ssi->playback.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch, true) < 0))
		goto no_dma;

	if (ssi->capture.dma_ch &&
	    (rz_ssi_dma_slave_config(ssi, ssi->capture.dma_ch, false) < 0))
		goto no_dma;

	return 0;

no_dma:
	rz_ssi_release_dma_channels(ssi);

	return -ENODEV;
}

static int rz_ssi_dai_trigger(struct snd_pcm_substream *substream, int cmd,
			      struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);
	int ret = 0, i, num_transfer = 1;

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_START:
		/* Soft Reset */
		rz_ssi_reg_mask_setl(ssi, SSIFCR, 0, SSIFCR_SSIRST);
		rz_ssi_reg_mask_setl(ssi, SSIFCR, SSIFCR_SSIRST, 0);
		udelay(5);

		rz_ssi_stream_init(strm, substream);

		if (ssi->dma_rt) {
			bool is_playback;

			is_playback = rz_ssi_stream_is_play(ssi, substream);
			ret = rz_ssi_dma_slave_config(ssi, ssi->playback.dma_ch,
						      is_playback);
			/* Fall back to PIO */
			if (ret < 0) {
				ssi->playback.transfer = rz_ssi_pio_send;
				ssi->capture.transfer = rz_ssi_pio_recv;
				rz_ssi_release_dma_channels(ssi);
			}
		}

		/* For DMA, queue up multiple DMA descriptors */
		if (rz_ssi_is_dma_enabled(ssi))
			num_transfer = 4;

		for (i = 0; i < num_transfer; i++) {
			ret = strm->transfer(ssi, strm);
			if (ret)
				goto done;
		}

		ret = rz_ssi_start(ssi, strm);
		break;
	case SNDRV_PCM_TRIGGER_STOP:
		rz_ssi_stop(ssi, strm);
		rz_ssi_stream_quit(ssi, strm);
		break;
	}

done:
	return ret;
}

static int rz_ssi_dai_set_fmt(struct snd_soc_dai *dai, unsigned int fmt)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);

	switch (fmt & SND_SOC_DAIFMT_CLOCK_PROVIDER_MASK) {
	case SND_SOC_DAIFMT_CBC_CFC:
		break;
	default:
		dev_err(ssi->dev, "Codec should be clk and frame consumer\n");
		return -EINVAL;
	}

	/*
	 * set clock polarity
	 *
	 * "normal" BCLK = Signal is available at rising edge of BCLK
	 * "normal" FSYNC = (I2S) Left ch starts with falling FSYNC edge
	 */
	switch (fmt & SND_SOC_DAIFMT_INV_MASK) {
	case SND_SOC_DAIFMT_NB_NF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_NB_IF:
		ssi->bckp_rise = false;
		ssi->lrckp_fsync_fall = true;
		break;
	case SND_SOC_DAIFMT_IB_NF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = false;
		break;
	case SND_SOC_DAIFMT_IB_IF:
		ssi->bckp_rise = true;
		ssi->lrckp_fsync_fall = true;
		break;
	default:
		return -EINVAL;
	}

	/* only i2s support */
	switch (fmt & SND_SOC_DAIFMT_FORMAT_MASK) {
	case SND_SOC_DAIFMT_I2S:
		break;
	default:
		dev_err(ssi->dev, "Only I2S mode is supported.\n");
		return -EINVAL;
	}

	return 0;
}

static int rz_ssi_dai_hw_params(struct snd_pcm_substream *substream,
				struct snd_pcm_hw_params *params,
				struct snd_soc_dai *dai)
{
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	unsigned int sample_bits = hw_param_interval(params,
					SNDRV_PCM_HW_PARAM_SAMPLE_BITS)->min;
	unsigned int channels = params_channels(params);

	if (sample_bits != 16) {
		dev_err(ssi->dev, "Unsupported sample width: %d\n",
			sample_bits);
		return -EINVAL;
	}

	if (channels != 2) {
		dev_err(ssi->dev, "Unsupported number of channels: %d\n",
			channels);
		return -EINVAL;
	}

	return rz_ssi_clk_setup(ssi, params_rate(params),
				params_channels(params));
}

static const struct snd_soc_dai_ops rz_ssi_dai_ops = {
	.trigger	= rz_ssi_dai_trigger,
	.set_fmt	= rz_ssi_dai_set_fmt,
	.hw_params	= rz_ssi_dai_hw_params,
};

static const struct snd_pcm_hardware rz_ssi_pcm_hardware = {
	.info			= SNDRV_PCM_INFO_INTERLEAVED	|
				  SNDRV_PCM_INFO_MMAP		|
				  SNDRV_PCM_INFO_MMAP_VALID,
	.buffer_bytes_max	= PREALLOC_BUFFER,
	.period_bytes_min	= 32,
	.period_bytes_max	= 8192,
	.channels_min		= SSI_CHAN_MIN,
	.channels_max		= SSI_CHAN_MAX,
	.periods_min		= 1,
	.periods_max		= 32,
	.fifo_size		= 32 * 2,
};

static int rz_ssi_pcm_open(struct snd_soc_component *component,
			   struct snd_pcm_substream *substream)
{
	snd_soc_set_runtime_hwparams(substream, &rz_ssi_pcm_hardware);

	return snd_pcm_hw_constraint_integer(substream->runtime,
					    SNDRV_PCM_HW_PARAM_PERIODS);
}

static snd_pcm_uframes_t rz_ssi_pcm_pointer(struct snd_soc_component *component,
					    struct snd_pcm_substream *substream)
{
	struct snd_soc_dai *dai = rz_ssi_get_dai(substream);
	struct rz_ssi_priv *ssi = snd_soc_dai_get_drvdata(dai);
	struct rz_ssi_stream *strm = rz_ssi_stream_get(ssi, substream);

	return strm->buffer_pos;
}

static int rz_ssi_pcm_new(struct snd_soc_component *component,
			  struct snd_soc_pcm_runtime *rtd)
{
	snd_pcm_set_managed_buffer_all(rtd->pcm, SNDRV_DMA_TYPE_DEV,
				       rtd->card->snd_card->dev,
				       PREALLOC_BUFFER, PREALLOC_BUFFER_MAX);
	return 0;
}

static struct snd_soc_dai_driver rz_ssi_soc_dai[] = {
	{
		.name			= "rz-ssi-dai",
		.playback = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.capture = {
			.rates		= SSI_RATES,
			.formats	= SSI_FMTS,
			.channels_min	= SSI_CHAN_MIN,
			.channels_max	= SSI_CHAN_MAX,
		},
		.ops = &rz_ssi_dai_ops,
	},
};

static const struct snd_soc_component_driver rz_ssi_soc_component = {
	.name		= "rz-ssi",
	.open		= rz_ssi_pcm_open,
	.pointer	= rz_ssi_pcm_pointer,
	.pcm_construct	= rz_ssi_pcm_new,
};

static int rz_ssi_probe(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi;
	struct clk *audio_clk;
	struct resource *res;
	int ret;

	ssi = devm_kzalloc(&pdev->dev, sizeof(*ssi), GFP_KERNEL);
	if (!ssi)
		return -ENOMEM;

	ssi->pdev = pdev;
	ssi->dev = &pdev->dev;
	ssi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(ssi->base))
		return PTR_ERR(ssi->base);

	ssi->phys = res->start;
	ssi->clk = devm_clk_get(&pdev->dev, "ssi");
	if (IS_ERR(ssi->clk))
		return PTR_ERR(ssi->clk);

	ssi->sfr_clk = devm_clk_get(&pdev->dev, "ssi_sfr");
	if (IS_ERR(ssi->sfr_clk))
		return PTR_ERR(ssi->sfr_clk);

	audio_clk = devm_clk_get(&pdev->dev, "audio_clk1");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk1");

	ssi->audio_clk_1 = clk_get_rate(audio_clk);
	audio_clk = devm_clk_get(&pdev->dev, "audio_clk2");
	if (IS_ERR(audio_clk))
		return dev_err_probe(&pdev->dev, PTR_ERR(audio_clk),
				     "no audio clk2");

	ssi->audio_clk_2 = clk_get_rate(audio_clk);
	if (!(ssi->audio_clk_1 || ssi->audio_clk_2))
		return dev_err_probe(&pdev->dev, -EINVAL,
				     "no audio clk1 or audio clk2");

	ssi->audio_mck = ssi->audio_clk_1 ? ssi->audio_clk_1 : ssi->audio_clk_2;

	/* Detect DMA support */
	ret = rz_ssi_dma_request(ssi, &pdev->dev);
	if (ret < 0) {
		dev_warn(&pdev->dev, "DMA not available, using PIO\n");
		ssi->playback.transfer = rz_ssi_pio_send;
		ssi->capture.transfer = rz_ssi_pio_recv;
	} else {
		dev_info(&pdev->dev, "DMA enabled\n");
		ssi->playback.transfer = rz_ssi_dma_transfer;
		ssi->capture.transfer = rz_ssi_dma_transfer;
	}

	ssi->playback.priv = ssi;
	ssi->capture.priv = ssi;

	spin_lock_init(&ssi->lock);
	dev_set_drvdata(&pdev->dev, ssi);

	/* Error Interrupt */
	ssi->irq_int = platform_get_irq_byname(pdev, "int_req");
	if (ssi->irq_int < 0)
		return dev_err_probe(&pdev->dev, -ENODEV,
				     "Unable to get SSI int_req IRQ\n");

	ret = devm_request_irq(&pdev->dev, ssi->irq_int, &rz_ssi_interrupt,
			       0, dev_name(&pdev->dev), ssi);
	if (ret < 0)
		return dev_err_probe(&pdev->dev, ret,
				     "irq request error (int_req)\n");

	if (!rz_ssi_is_dma_enabled(ssi)) {
		/* Tx and Rx interrupts (pio only) */
		ssi->irq_tx = platform_get_irq_byname(pdev, "dma_tx");
		if (ssi->irq_tx < 0)
			return dev_err_probe(&pdev->dev, -ENODEV,
					     "Unable to get SSI dma_tx IRQ\n");

		ret = devm_request_irq(&pdev->dev, ssi->irq_tx,
				       &rz_ssi_interrupt, 0,
				       dev_name(&pdev->dev), ssi);
		if (ret < 0)
			return dev_err_probe(&pdev->dev, ret,
					     "irq request error (dma_tx)\n");

		ssi->irq_rx = platform_get_irq_byname(pdev, "dma_rx");
		if (ssi->irq_rx < 0)
			return dev_err_probe(&pdev->dev, -ENODEV,
					     "Unable to get SSI dma_rx IRQ\n");

		ret = devm_request_irq(&pdev->dev, ssi->irq_rx,
				       &rz_ssi_interrupt, 0,
				       dev_name(&pdev->dev), ssi);
		if (ret < 0)
			return dev_err_probe(&pdev->dev, ret,
					     "irq request error (dma_rx)\n");
	}

	ssi->rstc = devm_reset_control_get_exclusive(&pdev->dev, NULL);
	if (IS_ERR(ssi->rstc))
		return PTR_ERR(ssi->rstc);

	reset_control_deassert(ssi->rstc);
	pm_runtime_enable(&pdev->dev);
	ret = pm_runtime_resume_and_get(&pdev->dev);
	if (ret < 0) {
		pm_runtime_disable(ssi->dev);
		reset_control_assert(ssi->rstc);
		return dev_err_probe(ssi->dev, ret, "pm_runtime_resume_and_get failed\n");
	}

	ret = devm_snd_soc_register_component(&pdev->dev, &rz_ssi_soc_component,
					      rz_ssi_soc_dai,
					      ARRAY_SIZE(rz_ssi_soc_dai));
	if (ret < 0) {
		rz_ssi_release_dma_channels(ssi);

		pm_runtime_put(ssi->dev);
		pm_runtime_disable(ssi->dev);
		reset_control_assert(ssi->rstc);
		dev_err(&pdev->dev, "failed to register snd component\n");
	}

	return ret;
}

static int rz_ssi_remove(struct platform_device *pdev)
{
	struct rz_ssi_priv *ssi = dev_get_drvdata(&pdev->dev);

	rz_ssi_release_dma_channels(ssi);

	pm_runtime_put(ssi->dev);
	pm_runtime_disable(ssi->dev);
	reset_control_assert(ssi->rstc);

	return 0;
}

static const struct of_device_id rz_ssi_of_match[] = {
	{ .compatible = "renesas,rz-ssi", },
	{/* Sentinel */},
};
MODULE_DEVICE_TABLE(of, rz_ssi_of_match);

static struct platform_driver rz_ssi_driver = {
	.driver	= {
		.name	= "rz-ssi-pcm-audio",
		.of_match_table = rz_ssi_of_match,
	},
	.probe		= rz_ssi_probe,
	.remove		= rz_ssi_remove,
};

module_platform_driver(rz_ssi_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("Renesas RZ/G2L ASoC Serial Sound Interface Driver");
MODULE_AUTHOR("Biju Das <biju.das.jz@bp.renesas.com>");