xref: /linux/sound/soc/samsung/idma.c (revision ca55b2fef3a9373fcfc30f82fd26bc7fccbda732)
/*
 * sound/soc/samsung/idma.c
 *
 * Copyright (c) 2011 Samsung Electronics Co., Ltd.
 *		http://www.samsung.com
 *
 * I2S0's Internal DMA driver
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 */
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/module.h>
#include <sound/pcm.h>
#include <sound/pcm_params.h>
#include <sound/soc.h>

#include "i2s.h"
#include "idma.h"
#include "dma.h"
#include "i2s-regs.h"

#define ST_RUNNING		(1<<0)
#define ST_OPENED		(1<<1)

static const struct snd_pcm_hardware idma_hardware = {
	.info = SNDRV_PCM_INFO_INTERLEAVED |
		SNDRV_PCM_INFO_BLOCK_TRANSFER |
		SNDRV_PCM_INFO_MMAP |
		SNDRV_PCM_INFO_MMAP_VALID |
		SNDRV_PCM_INFO_PAUSE |
		SNDRV_PCM_INFO_RESUME,
	.buffer_bytes_max = MAX_IDMA_BUFFER,
	.period_bytes_min = 128,
	.period_bytes_max = MAX_IDMA_PERIOD,
	.periods_min = 1,
	.periods_max = 2,
};

struct idma_ctrl {
	spinlock_t	lock;
	int		state;
	dma_addr_t	start;
	dma_addr_t	pos;
	dma_addr_t	end;
	dma_addr_t	period;
	dma_addr_t	periodsz;
	void		*token;
	void		(*cb)(void *dt, int bytes_xfer);
};

static struct idma_info {
	spinlock_t	lock;
	void __iomem	*regs;
	dma_addr_t	lp_tx_addr;
} idma;

static int idma_irq;

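/*
 * Current playback position: the LP buffer base plus the number of
 * 32-bit words already transferred, as reported by I2STRNCNT.
 */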
static void idma_getpos(dma_addr_t *src)
{
	*src = idma.lp_tx_addr +
		(readl(idma.regs + I2STRNCNT) & 0xffffff) * 4;
}

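/*
 * Program the internal DMA for the current buffer: point the level-0
 * interrupt address at the end of the first period, set the start
 * address to the LP buffer base, write the transfer size (in words)
 * and enable the level-0 interrupt.
 */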
static int idma_enqueue(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;
	u32 val;

	spin_lock(&prtd->lock);
	prtd->token = (void *) substream;
	spin_unlock(&prtd->lock);

	/* Internal DMA Level0 Interrupt Address */
	val = idma.lp_tx_addr + prtd->periodsz;
	writel(val, idma.regs + I2SLVL0ADDR);

	/* Start address0 of I2S internal DMA operation. */
	val = idma.lp_tx_addr;
	writel(val, idma.regs + I2SSTR0);

	/*
	 * Transfer block size for the I2S internal DMA, in words.
	 * The transfer size must be programmed before the DMA operation
	 * is started.
	 */
	val = readl(idma.regs + I2SSIZE);
	val &= ~(I2SSIZE_TRNMSK << I2SSIZE_SHIFT);
	val |= (((runtime->dma_bytes >> 2) &
			I2SSIZE_TRNMSK) << I2SSIZE_SHIFT);
	writel(val, idma.regs + I2SSIZE);

	val = readl(idma.regs + I2SAHB);
	val |= AHB_INTENLVL0;
	writel(val, idma.regs + I2SAHB);

	return 0;
}

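/* Register the callback invoked from the IRQ handler after each period. */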
static void idma_setcallbk(struct snd_pcm_substream *substream,
				void (*cb)(void *, int))
{
	struct idma_ctrl *prtd = substream->runtime->private_data;

	spin_lock(&prtd->lock);
	prtd->cb = cb;
	spin_unlock(&prtd->lock);
}

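/*
 * Start or stop the internal DMA by toggling AHB_DMAEN and the
 * level-0 interrupt enable in I2SAHB.
 */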
static void idma_control(int op)
{
	u32 val;

	spin_lock(&idma.lock);

	val = readl(idma.regs + I2SAHB);

	switch (op) {
	case LPAM_DMA_START:
		val |= (AHB_INTENLVL0 | AHB_DMAEN);
		break;
	case LPAM_DMA_STOP:
		val &= ~(AHB_INTENLVL0 | AHB_DMAEN);
		break;
	default:
		spin_unlock(&idma.lock);
		return;
	}

	writel(val, idma.regs + I2SAHB);
	spin_unlock(&idma.lock);
}

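/* Period-elapsed callback, invoked from the IRQ handler via prtd->cb. */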
static void idma_done(void *id, int bytes_xfer)
{
	struct snd_pcm_substream *substream = id;
	struct idma_ctrl *prtd = substream->runtime->private_data;

	if (prtd && (prtd->state & ST_RUNNING))
		snd_pcm_period_elapsed(substream);
}

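/*
 * Route the TX path to the internal DMA (MOD_TXS_IDMA), enable DMA
 * auto-reload on the AHB side and record the buffer/period geometry
 * in the per-stream idma_ctrl.
 */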
static int idma_hw_params(struct snd_pcm_substream *substream,
				struct snd_pcm_hw_params *params)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;
	u32 mod = readl(idma.regs + I2SMOD);
	u32 ahb = readl(idma.regs + I2SAHB);

	ahb |= (AHB_DMARLD | AHB_INTMASK);
	mod |= MOD_TXS_IDMA;
	writel(ahb, idma.regs + I2SAHB);
	writel(mod, idma.regs + I2SMOD);

	snd_pcm_set_runtime_buffer(substream, &substream->dma_buffer);
	runtime->dma_bytes = params_buffer_bytes(params);

	prtd->start = prtd->pos = runtime->dma_addr;
	prtd->period = params_periods(params);
	prtd->periodsz = params_period_bytes(params);
	prtd->end = runtime->dma_addr + runtime->dma_bytes;

	idma_setcallbk(substream, idma_done);

	return 0;
}

static int idma_hw_free(struct snd_pcm_substream *substream)
{
	snd_pcm_set_runtime_buffer(substream, NULL);

	return 0;
}

static int idma_prepare(struct snd_pcm_substream *substream)
{
	struct idma_ctrl *prtd = substream->runtime->private_data;

	prtd->pos = prtd->start;

	/* flush the DMA channel */
	idma_control(LPAM_DMA_STOP);
	idma_enqueue(substream);

	return 0;
}

static int idma_trigger(struct snd_pcm_substream *substream, int cmd)
{
	struct idma_ctrl *prtd = substream->runtime->private_data;
	int ret = 0;

	spin_lock(&prtd->lock);

	switch (cmd) {
	case SNDRV_PCM_TRIGGER_RESUME:
	case SNDRV_PCM_TRIGGER_START:
	case SNDRV_PCM_TRIGGER_PAUSE_RELEASE:
		prtd->state |= ST_RUNNING;
		idma_control(LPAM_DMA_START);
		break;

	case SNDRV_PCM_TRIGGER_SUSPEND:
	case SNDRV_PCM_TRIGGER_STOP:
	case SNDRV_PCM_TRIGGER_PAUSE_PUSH:
		prtd->state &= ~ST_RUNNING;
		idma_control(LPAM_DMA_STOP);
		break;

	default:
		ret = -EINVAL;
		break;
	}

	spin_unlock(&prtd->lock);

	return ret;
}

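/*
 * Report the playback position as the byte offset of the hardware
 * read pointer from the start of the buffer.
 */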
static snd_pcm_uframes_t
	idma_pointer(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;
	dma_addr_t src;
	unsigned long res;

	spin_lock(&prtd->lock);

	idma_getpos(&src);
	res = src - prtd->start;

	spin_unlock(&prtd->lock);

	return bytes_to_frames(runtime, res);
}

static int idma_mmap(struct snd_pcm_substream *substream,
	struct vm_area_struct *vma)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	unsigned long size, offset;
	int ret;

	/* From snd_pcm_lib_mmap_iomem */
	vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
	size = vma->vm_end - vma->vm_start;
	offset = vma->vm_pgoff << PAGE_SHIFT;
	ret = io_remap_pfn_range(vma, vma->vm_start,
			(runtime->dma_addr + offset) >> PAGE_SHIFT,
			size, vma->vm_page_prot);

	return ret;
}

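/*
 * Level-0 interrupt handler: acknowledge the interrupt, advance the
 * level-0 address by one period (wrapping at the end of the buffer)
 * and notify ALSA through the registered callback.
 */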
static irqreturn_t iis_irq(int irqno, void *dev_id)
{
	struct idma_ctrl *prtd = (struct idma_ctrl *)dev_id;
	u32 iisahb, val, addr;

	iisahb = readl(idma.regs + I2SAHB);

	val = (iisahb & AHB_LVL0INT) ? AHB_CLRLVL0INT : 0;

	if (val) {
		iisahb |= val;
		writel(iisahb, idma.regs + I2SAHB);

		addr = readl(idma.regs + I2SLVL0ADDR) - idma.lp_tx_addr;
		addr += prtd->periodsz;
		addr %= (u32)(prtd->end - prtd->start);
		addr += idma.lp_tx_addr;

		writel(addr, idma.regs + I2SLVL0ADDR);

		if (prtd->cb)
			prtd->cb(prtd->token, prtd->period);
	}

	return IRQ_HANDLED;
}

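/* Allocate the per-stream state and claim the I2S interrupt. */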
static int idma_open(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd;
	int ret;

	snd_soc_set_runtime_hwparams(substream, &idma_hardware);

	prtd = kzalloc(sizeof(struct idma_ctrl), GFP_KERNEL);
	if (prtd == NULL)
		return -ENOMEM;

	spin_lock_init(&prtd->lock);

	ret = request_irq(idma_irq, iis_irq, 0, "i2s", prtd);
	if (ret < 0) {
		pr_err("failed to claim i2s irq, ret = %d\n", ret);
		kfree(prtd);
		return ret;
	}

	runtime->private_data = prtd;

	return 0;
}

static int idma_close(struct snd_pcm_substream *substream)
{
	struct snd_pcm_runtime *runtime = substream->runtime;
	struct idma_ctrl *prtd = runtime->private_data;

	if (!prtd) {
		pr_err("idma_close called with prtd == NULL\n");
		return 0;
	}

	free_irq(idma_irq, prtd);
	kfree(prtd);

	return 0;
}

static struct snd_pcm_ops idma_ops = {
	.open		= idma_open,
	.close		= idma_close,
	.ioctl		= snd_pcm_lib_ioctl,
	.trigger	= idma_trigger,
	.pointer	= idma_pointer,
	.mmap		= idma_mmap,
	.hw_params	= idma_hw_params,
	.hw_free	= idma_hw_free,
	.prepare	= idma_prepare,
};

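/* Unmap the preallocated LP playback buffer when the PCM is freed. */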
static void idma_free(struct snd_pcm *pcm)
{
	struct snd_pcm_substream *substream;
	struct snd_dma_buffer *buf;

	substream = pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream;
	if (!substream)
		return;

	buf = &substream->dma_buffer;
	if (!buf->area)
		return;

	iounmap((void __iomem *)buf->area);

	buf->area = NULL;
	buf->addr = 0;
}

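/*
 * The LP playback buffer is a fixed region handed over by the I2S
 * driver (lp_tx_addr); map it with ioremap() and register the mapping
 * as the substream's DMA buffer.
 */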
static int preallocate_idma_buffer(struct snd_pcm *pcm, int stream)
{
	struct snd_pcm_substream *substream = pcm->streams[stream].substream;
	struct snd_dma_buffer *buf = &substream->dma_buffer;

	buf->dev.dev = pcm->card->dev;
	buf->private_data = NULL;

	/* Assign PCM buffer pointers */
	buf->dev.type = SNDRV_DMA_TYPE_CONTINUOUS;
	buf->addr = idma.lp_tx_addr;
	buf->bytes = idma_hardware.buffer_bytes_max;
	buf->area = (unsigned char * __force)ioremap(buf->addr, buf->bytes);
	if (!buf->area)
		return -ENOMEM;

	return 0;
}

static int idma_new(struct snd_soc_pcm_runtime *rtd)
{
	struct snd_card *card = rtd->card->snd_card;
	struct snd_pcm *pcm = rtd->pcm;
	int ret;

	ret = dma_coerce_mask_and_coherent(card->dev, DMA_BIT_MASK(32));
	if (ret)
		return ret;

	if (pcm->streams[SNDRV_PCM_STREAM_PLAYBACK].substream) {
		ret = preallocate_idma_buffer(pcm,
				SNDRV_PCM_STREAM_PLAYBACK);
	}

	return ret;
}

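/*
 * idma_reg_addr_init - record the I2S register base and the LP buffer
 * address handed over by the Samsung I2S CPU DAI driver.
 */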
void idma_reg_addr_init(void __iomem *regs, dma_addr_t addr)
{
	spin_lock_init(&idma.lock);
	idma.regs = regs;
	idma.lp_tx_addr = addr;
}
EXPORT_SYMBOL_GPL(idma_reg_addr_init);

static struct snd_soc_platform_driver asoc_idma_platform = {
	.ops = &idma_ops,
	.pcm_new = idma_new,
	.pcm_free = idma_free,
};

static int asoc_idma_platform_probe(struct platform_device *pdev)
{
	idma_irq = platform_get_irq(pdev, 0);
	if (idma_irq < 0)
		return idma_irq;

	return devm_snd_soc_register_platform(&pdev->dev, &asoc_idma_platform);
}

static struct platform_driver asoc_idma_driver = {
	.driver = {
		.name = "samsung-idma",
	},

	.probe = asoc_idma_platform_probe,
};

module_platform_driver(asoc_idma_driver);

MODULE_AUTHOR("Jaswinder Singh, <jassisinghbrar@gmail.com>");
MODULE_DESCRIPTION("Samsung ASoC IDMA Driver");
MODULE_LICENSE("GPL");