xref: /linux/drivers/iio/adc/nxp-sar-adc.c (revision ff124bbbca1d3a07fa1392ffdbbdeece71f68ece)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * NXP SAR-ADC driver (adapted from Freescale Vybrid vf610 ADC driver
4  * by Fugang Duan <B38611@freescale.com>)
5  *
6  * Copyright 2013 Freescale Semiconductor, Inc.
7  * Copyright 2017, 2020-2025 NXP
8  * Copyright 2025, Linaro Ltd
9  */
10 #include <linux/bitfield.h>
11 #include <linux/bitops.h>
12 #include <linux/circ_buf.h>
13 #include <linux/cleanup.h>
14 #include <linux/clk.h>
15 #include <linux/completion.h>
16 #include <linux/delay.h>
17 #include <linux/dma-mapping.h>
18 #include <linux/dmaengine.h>
19 #include <linux/err.h>
20 #include <linux/interrupt.h>
21 #include <linux/iopoll.h>
22 #include <linux/math64.h>
23 #include <linux/minmax.h>
24 #include <linux/mod_devicetable.h>
25 #include <linux/module.h>
26 #include <linux/platform_device.h>
27 #include <linux/pm.h>
28 #include <linux/property.h>
29 #include <linux/slab.h>
30 #include <linux/spinlock.h>
31 #include <linux/time.h>
32 #include <linux/types.h>
33 #include <linux/units.h>
34 
35 #include <linux/iio/iio.h>
36 #include <linux/iio/triggered_buffer.h>
37 #include <linux/iio/trigger_consumer.h>
38 
39 /* SAR ADC registers. */
40 #define NXP_SAR_ADC_CDR(__base, __channel)	(((__base) + 0x100) + ((__channel) * 0x4))
41 
42 #define NXP_SAR_ADC_CDR_CDATA_MASK	GENMASK(11, 0)
43 #define NXP_SAR_ADC_CDR_VALID		BIT(19)
44 
45 /* Main Configuration Register */
46 #define NXP_SAR_ADC_MCR(__base)		((__base) + 0x00)
47 
48 #define NXP_SAR_ADC_MCR_PWDN		BIT(0)
49 #define NXP_SAR_ADC_MCR_ACKO		BIT(5)
50 #define NXP_SAR_ADC_MCR_ADCLKSEL	BIT(8)
51 #define NXP_SAR_ADC_MCR_TSAMP_MASK	GENMASK(10, 9)
52 #define NXP_SAR_ADC_MCR_NRSMPL_MASK	GENMASK(12, 11)
53 #define NXP_SAR_ADC_MCR_AVGEN		BIT(13)
54 #define NXP_SAR_ADC_MCR_CALSTART	BIT(14)
55 #define NXP_SAR_ADC_MCR_NSTART		BIT(24)
56 #define NXP_SAR_ADC_MCR_MODE		BIT(29)
57 #define NXP_SAR_ADC_MCR_OWREN		BIT(31)
58 
59 /* Main Status Register */
60 #define NXP_SAR_ADC_MSR(__base)		((__base) + 0x04)
61 
62 #define NXP_SAR_ADC_MSR_CALBUSY		BIT(29)
63 #define NXP_SAR_ADC_MSR_CALFAIL		BIT(30)
64 
65 /* Interrupt Status Register */
66 #define NXP_SAR_ADC_ISR(__base)		((__base) + 0x10)
67 
68 #define NXP_SAR_ADC_ISR_ECH		BIT(0)
69 
70 /*  Channel Pending Register */
71 #define NXP_SAR_ADC_CEOCFR0(__base)	((__base) + 0x14)
72 #define NXP_SAR_ADC_CEOCFR1(__base)	((__base) + 0x18)
73 
74 #define NXP_SAR_ADC_EOC_CH(c)		BIT(c)
75 
76 /* Interrupt Mask Register */
77 #define NXP_SAR_ADC_IMR(__base)		((__base) + 0x20)
78 
79 /* Channel Interrupt Mask Register */
80 #define NXP_SAR_ADC_CIMR0(__base)	((__base) + 0x24)
81 #define NXP_SAR_ADC_CIMR1(__base)	((__base) + 0x28)
82 
83 /* DMA Setting Register */
84 #define NXP_SAR_ADC_DMAE(__base)	((__base) + 0x40)
85 
86 #define NXP_SAR_ADC_DMAE_DMAEN		BIT(0)
87 #define NXP_SAR_ADC_DMAE_DCLR		BIT(1)
88 
89 /* DMA Control register */
90 #define NXP_SAR_ADC_DMAR0(__base)	((__base) + 0x44)
91 #define NXP_SAR_ADC_DMAR1(__base)	((__base) + 0x48)
92 
93 /* Conversion Timing Register */
94 #define NXP_SAR_ADC_CTR0(__base)	((__base) + 0x94)
95 #define NXP_SAR_ADC_CTR1(__base)	((__base) + 0x98)
96 
97 #define NXP_SAR_ADC_CTR_INPSAMP_MIN	0x08
98 #define NXP_SAR_ADC_CTR_INPSAMP_MAX	0xff
99 
100 /* Normal Conversion Mask Register */
101 #define NXP_SAR_ADC_NCMR0(__base)	((__base) + 0xa4)
102 #define NXP_SAR_ADC_NCMR1(__base)	((__base) + 0xa8)
103 
104 /* Normal Conversion Mask Register field define */
105 #define NXP_SAR_ADC_CH_MASK		GENMASK(7, 0)
106 
107 /* Other field define */
108 #define NXP_SAR_ADC_CONV_TIMEOUT	(msecs_to_jiffies(100))
109 #define NXP_SAR_ADC_CAL_TIMEOUT_US	(100 * USEC_PER_MSEC)
110 #define NXP_SAR_ADC_WAIT_US		(2 * USEC_PER_MSEC)
111 #define NXP_SAR_ADC_RESOLUTION		12
112 
113 /* Duration of conversion phases */
114 #define NXP_SAR_ADC_TPT			2
115 #define NXP_SAR_ADC_DP			2
116 #define NXP_SAR_ADC_CT			((NXP_SAR_ADC_RESOLUTION + 2) * 4)
117 #define NXP_SAR_ADC_CONV_TIME		(NXP_SAR_ADC_TPT + NXP_SAR_ADC_CT + NXP_SAR_ADC_DP)
118 
119 #define NXP_SAR_ADC_NR_CHANNELS		8
120 
121 #define NXP_PAGE_SIZE			SZ_4K
122 #define NXP_SAR_ADC_DMA_SAMPLE_SZ	DMA_SLAVE_BUSWIDTH_4_BYTES
123 #define NXP_SAR_ADC_DMA_BUFF_SZ		(NXP_PAGE_SIZE * NXP_SAR_ADC_DMA_SAMPLE_SZ)
124 #define NXP_SAR_ADC_DMA_SAMPLE_CNT	(NXP_SAR_ADC_DMA_BUFF_SZ / NXP_SAR_ADC_DMA_SAMPLE_SZ)
125 
/* Per-instance driver state for one SAR ADC controller. */
struct nxp_sar_adc {
	void __iomem *regs;		/* Mapped register base. */
	phys_addr_t regs_phys;		/* Physical base, used to build the DMA source address. */
	u8 current_channel;		/* Channel of the in-flight raw read. */
	u8 channels_used;		/* Number of channels in buffered_chan[]. */
	u16 value;			/* Result of the last raw conversion. */
	u32 vref_mV;			/* Reference voltage in millivolts. */

	/* Save and restore context. */
	u32 inpsamp;			/* Saved sample-phase duration (CTR0) across suspend. */
	u32 pwdn;			/* Saved power-down state across suspend. */

	struct clk *clk;
	struct dma_chan	*dma_chan;
	struct completion completion;	/* Completed when a raw conversion finishes. */
	struct circ_buf dma_buf;	/* CPU-side view of the cyclic DMA buffer. */

	dma_addr_t rx_dma_buf;		/* DMA address of the cyclic buffer. */
	dma_cookie_t cookie;		/* Cookie of the submitted cyclic transfer. */

	/* Protect circular buffers access. */
	spinlock_t lock;

	/* Array of enabled channels. */
	u16 buffered_chan[NXP_SAR_ADC_NR_CHANNELS];

	/* Buffer to be filled by the DMA. */
	IIO_DECLARE_BUFFER_WITH_TS(u16, buffer, NXP_SAR_ADC_NR_CHANNELS);
};
155 
/* Per-compatible (match) data. */
struct nxp_sar_adc_data {
	u32 vref_mV;		/* Reference voltage in millivolts. */
	const char *model;	/* Used as the IIO device name. */
};
160 
161 #define ADC_CHAN(_idx, _chan_type) {				\
162 	.type = (_chan_type),					\
163 	.indexed = 1,						\
164 	.channel = (_idx),					\
165 	.info_mask_separate = BIT(IIO_CHAN_INFO_RAW),		\
166 	.info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE) |	\
167 				BIT(IIO_CHAN_INFO_SAMP_FREQ),	\
168 	.scan_index = (_idx),					\
169 	.scan_type = {						\
170 		.sign = 'u',					\
171 		.realbits = 12,					\
172 		.storagebits = 16,				\
173 	},							\
174 }
175 
/* The 8 usable voltage channels plus the software timestamp channel. */
static const struct iio_chan_spec nxp_sar_adc_iio_channels[] = {
	ADC_CHAN(0, IIO_VOLTAGE),
	ADC_CHAN(1, IIO_VOLTAGE),
	ADC_CHAN(2, IIO_VOLTAGE),
	ADC_CHAN(3, IIO_VOLTAGE),
	ADC_CHAN(4, IIO_VOLTAGE),
	ADC_CHAN(5, IIO_VOLTAGE),
	ADC_CHAN(6, IIO_VOLTAGE),
	ADC_CHAN(7, IIO_VOLTAGE),
	/*
	 * The NXP SAR ADC documentation marks the channels 8 to 31 as
	 * "Reserved". Reflect the same in the driver in case new ADC
	 * variants comes with more channels.
	 */
	IIO_CHAN_SOFT_TIMESTAMP(32),
};
192 
193 static void nxp_sar_adc_irq_cfg(struct nxp_sar_adc *info, bool enable)
194 {
195 	if (enable)
196 		writel(NXP_SAR_ADC_ISR_ECH, NXP_SAR_ADC_IMR(info->regs));
197 	else
198 		writel(0, NXP_SAR_ADC_IMR(info->regs));
199 }
200 
201 static bool nxp_sar_adc_set_enabled(struct nxp_sar_adc *info, bool enable)
202 {
203 	u32 mcr;
204 	bool pwdn;
205 
206 	mcr = readl(NXP_SAR_ADC_MCR(info->regs));
207 
208 	/*
209 	 * Get the current state and return it later. This is used for
210 	 * suspend/resume to get the power state
211 	 */
212 	pwdn = FIELD_GET(NXP_SAR_ADC_MCR_PWDN, mcr);
213 
214 	/* When the enabled flag is not set, we set the power down bit */
215 	FIELD_MODIFY(NXP_SAR_ADC_MCR_PWDN, &mcr, !enable);
216 
217 	writel(mcr, NXP_SAR_ADC_MCR(info->regs));
218 
219 	/*
220 	 * Ensure there are at least three cycles between the
221 	 * configuration of NCMR and the setting of NSTART.
222 	 */
223 	if (enable)
224 		ndelay(div64_u64(NSEC_PER_SEC, clk_get_rate(info->clk) * 3));
225 
226 	return pwdn;
227 }
228 
/* Power the ADC up; returns the previous power-down state. */
static inline bool nxp_sar_adc_enable(struct nxp_sar_adc *info)
{
	return nxp_sar_adc_set_enabled(info, true);
}
233 
/* Power the ADC down; returns the previous power-down state. */
static inline bool nxp_sar_adc_disable(struct nxp_sar_adc *info)
{
	return nxp_sar_adc_set_enabled(info, false);
}
238 
239 static inline void nxp_sar_adc_calibration_start(void __iomem *base)
240 {
241 	u32 mcr = readl(NXP_SAR_ADC_MCR(base));
242 
243 	FIELD_MODIFY(NXP_SAR_ADC_MCR_CALSTART, &mcr, 0x1);
244 
245 	writel(mcr, NXP_SAR_ADC_MCR(base));
246 }
247 
248 static inline int nxp_sar_adc_calibration_wait(void __iomem *base)
249 {
250 	u32 msr, ret;
251 
252 	ret = readl_poll_timeout(NXP_SAR_ADC_MSR(base), msr,
253 				 !FIELD_GET(NXP_SAR_ADC_MSR_CALBUSY, msr),
254 				 NXP_SAR_ADC_WAIT_US,
255 				 NXP_SAR_ADC_CAL_TIMEOUT_US);
256 	if (ret)
257 		return ret;
258 
259 	if (FIELD_GET(NXP_SAR_ADC_MSR_CALFAIL, msr)) {
260 		/*
261 		 * If the calibration fails, the status register bit must be
262 		 * cleared.
263 		 */
264 		FIELD_MODIFY(NXP_SAR_ADC_MSR_CALFAIL, &msr, 0x0);
265 		writel(msr, NXP_SAR_ADC_MSR(base));
266 
267 		return -EAGAIN;
268 	}
269 
270 	return 0;
271 }
272 
273 static int nxp_sar_adc_calibration(struct nxp_sar_adc *info)
274 {
275 	int ret;
276 
277 	/* Calibration works only if the ADC is powered up. */
278 	nxp_sar_adc_enable(info);
279 
280 	/* The calibration operation starts. */
281 	nxp_sar_adc_calibration_start(info->regs);
282 
283 	ret = nxp_sar_adc_calibration_wait(info->regs);
284 
285 	/*
286 	 * Calibration works only if the ADC is powered up. However
287 	 * the calibration is called from the probe function where the
288 	 * iio is not enabled, so we disable after the calibration.
289 	 */
290 	nxp_sar_adc_disable(info);
291 
292 	return ret;
293 }
294 
295 static void nxp_sar_adc_conversion_timing_set(struct nxp_sar_adc *info, u32 inpsamp)
296 {
297 	inpsamp = clamp(inpsamp, NXP_SAR_ADC_CTR_INPSAMP_MIN, NXP_SAR_ADC_CTR_INPSAMP_MAX);
298 
299 	writel(inpsamp, NXP_SAR_ADC_CTR0(info->regs));
300 }
301 
/* Return the current sample-phase duration (INPSAMP) from CTR0. */
static u32 nxp_sar_adc_conversion_timing_get(struct nxp_sar_adc *info)
{
	return readl(NXP_SAR_ADC_CTR0(info->regs));
}
306 
/* Acknowledge all pending end-of-conversion flags in both CEOCFR banks. */
static void nxp_sar_adc_read_notify(struct nxp_sar_adc *info)
{
	writel(NXP_SAR_ADC_CH_MASK, NXP_SAR_ADC_CEOCFR0(info->regs));
	writel(NXP_SAR_ADC_CH_MASK, NXP_SAR_ADC_CEOCFR1(info->regs));
}
312 
313 static int nxp_sar_adc_read_data(struct nxp_sar_adc *info, unsigned int chan)
314 {
315 	u32 ceocfr, cdr;
316 
317 	ceocfr = readl(NXP_SAR_ADC_CEOCFR0(info->regs));
318 
319 	/*
320 	 * FIELD_GET() can not be used here because EOC_CH is not constant.
321 	 * TODO: Switch to field_get() when it will be available.
322 	 */
323 	if (!(NXP_SAR_ADC_EOC_CH(chan) & ceocfr))
324 		return -EIO;
325 
326 	cdr = readl(NXP_SAR_ADC_CDR(info->regs, chan));
327 	if (!(FIELD_GET(NXP_SAR_ADC_CDR_VALID, cdr)))
328 		return -EIO;
329 
330 	return FIELD_GET(NXP_SAR_ADC_CDR_CDATA_MASK, cdr);
331 }
332 
333 static void nxp_sar_adc_isr_buffer(struct iio_dev *indio_dev)
334 {
335 	struct nxp_sar_adc *info = iio_priv(indio_dev);
336 	unsigned int i;
337 	int ret;
338 
339 	for (i = 0; i < info->channels_used; i++) {
340 		ret = nxp_sar_adc_read_data(info, info->buffered_chan[i]);
341 		if (ret < 0) {
342 			nxp_sar_adc_read_notify(info);
343 			return;
344 		}
345 
346 		info->buffer[i] = ret;
347 	}
348 
349 	nxp_sar_adc_read_notify(info);
350 
351 	iio_push_to_buffers_with_ts(indio_dev, info->buffer, sizeof(info->buffer),
352 				    iio_get_time_ns(indio_dev));
353 
354 	iio_trigger_notify_done(indio_dev->trig);
355 }
356 
357 static void nxp_sar_adc_isr_read_raw(struct iio_dev *indio_dev)
358 {
359 	struct nxp_sar_adc *info = iio_priv(indio_dev);
360 	int ret;
361 
362 	ret = nxp_sar_adc_read_data(info, info->current_channel);
363 	nxp_sar_adc_read_notify(info);
364 	if (ret < 0)
365 		return;
366 
367 	info->value = ret;
368 	complete(&info->completion);
369 }
370 
371 static irqreturn_t nxp_sar_adc_isr(int irq, void *dev_id)
372 {
373 	struct iio_dev *indio_dev = dev_id;
374 	struct nxp_sar_adc *info = iio_priv(indio_dev);
375 	int isr;
376 
377 	isr = readl(NXP_SAR_ADC_ISR(info->regs));
378 	if (!(FIELD_GET(NXP_SAR_ADC_ISR_ECH, isr)))
379 		return IRQ_NONE;
380 
381 	if (iio_buffer_enabled(indio_dev))
382 		nxp_sar_adc_isr_buffer(indio_dev);
383 	else
384 		nxp_sar_adc_isr_read_raw(indio_dev);
385 
386 	writel(NXP_SAR_ADC_ISR_ECH, NXP_SAR_ADC_ISR(info->regs));
387 
388 	return IRQ_HANDLED;
389 }
390 
391 static void nxp_sar_adc_channels_disable(struct nxp_sar_adc *info, u32 mask)
392 {
393 	u32 ncmr, cimr;
394 
395 	ncmr = readl(NXP_SAR_ADC_NCMR0(info->regs));
396 	cimr = readl(NXP_SAR_ADC_CIMR0(info->regs));
397 
398 	/* FIELD_MODIFY() can not be used because the mask is not constant */
399 	ncmr &= ~mask;
400 	cimr &= ~mask;
401 
402 	writel(ncmr, NXP_SAR_ADC_NCMR0(info->regs));
403 	writel(cimr, NXP_SAR_ADC_CIMR0(info->regs));
404 }
405 
406 static void nxp_sar_adc_channels_enable(struct nxp_sar_adc *info, u32 mask)
407 {
408 	u32 ncmr, cimr;
409 
410 	ncmr = readl(NXP_SAR_ADC_NCMR0(info->regs));
411 	cimr = readl(NXP_SAR_ADC_CIMR0(info->regs));
412 
413 	ncmr |= mask;
414 	cimr |= mask;
415 
416 	writel(ncmr, NXP_SAR_ADC_NCMR0(info->regs));
417 	writel(cimr, NXP_SAR_ADC_CIMR0(info->regs));
418 }
419 
420 static void nxp_sar_adc_dma_channels_enable(struct nxp_sar_adc *info, u32 mask)
421 {
422 	u32 dmar;
423 
424 	dmar = readl(NXP_SAR_ADC_DMAR0(info->regs));
425 
426 	dmar |= mask;
427 
428 	writel(dmar, NXP_SAR_ADC_DMAR0(info->regs));
429 }
430 
431 static void nxp_sar_adc_dma_channels_disable(struct nxp_sar_adc *info, u32 mask)
432 {
433 	u32 dmar;
434 
435 	dmar = readl(NXP_SAR_ADC_DMAR0(info->regs));
436 
437 	dmar &= ~mask;
438 
439 	writel(dmar, NXP_SAR_ADC_DMAR0(info->regs));
440 }
441 
442 static void nxp_sar_adc_dma_cfg(struct nxp_sar_adc *info, bool enable)
443 {
444 	u32 dmae;
445 
446 	dmae = readl(NXP_SAR_ADC_DMAE(info->regs));
447 
448 	FIELD_MODIFY(NXP_SAR_ADC_DMAE_DMAEN, &dmae, enable);
449 
450 	writel(dmae, NXP_SAR_ADC_DMAE(info->regs));
451 }
452 
/* Clear NSTART and wait for any in-flight chain conversion to drain. */
static void nxp_sar_adc_stop_conversion(struct nxp_sar_adc *info)
{
	u32 mcr;

	mcr = readl(NXP_SAR_ADC_MCR(info->regs));

	FIELD_MODIFY(NXP_SAR_ADC_MCR_NSTART, &mcr, 0x0);

	writel(mcr, NXP_SAR_ADC_MCR(info->regs));

	/*
	 * On disable, we have to wait for the transaction to finish.
	 * ADC does not abort the transaction if a chain conversion is
	 * in progress. Wait for the worst case scenario - 80 ADC clk
	 * cycles. The clock rate is 80MHz, this routine is called
	 * only when the capture finishes. The delay will be very
	 * short, usec-ish, which is acceptable in the atomic context.
	 */
	ndelay(div64_u64(NSEC_PER_SEC, clk_get_rate(info->clk)) * 80);
}
473 
474 static int nxp_sar_adc_start_conversion(struct nxp_sar_adc *info, bool raw)
475 {
476 	u32 mcr;
477 
478 	mcr = readl(NXP_SAR_ADC_MCR(info->regs));
479 
480 	FIELD_MODIFY(NXP_SAR_ADC_MCR_NSTART, &mcr, 0x1);
481 	FIELD_MODIFY(NXP_SAR_ADC_MCR_MODE, &mcr, raw ? 0 : 1);
482 
483 	writel(mcr, NXP_SAR_ADC_MCR(info->regs));
484 
485 	return 0;
486 }
487 
488 static int nxp_sar_adc_read_channel(struct nxp_sar_adc *info, int channel)
489 {
490 	int ret;
491 
492 	info->current_channel = channel;
493 	nxp_sar_adc_channels_enable(info, BIT(channel));
494 	nxp_sar_adc_irq_cfg(info, true);
495 	nxp_sar_adc_enable(info);
496 
497 	reinit_completion(&info->completion);
498 	ret = nxp_sar_adc_start_conversion(info, true);
499 	if (ret < 0)
500 		goto out_disable;
501 
502 	if (!wait_for_completion_interruptible_timeout(&info->completion,
503 						       NXP_SAR_ADC_CONV_TIMEOUT))
504 		ret = -ETIMEDOUT;
505 
506 	nxp_sar_adc_stop_conversion(info);
507 
508 out_disable:
509 	nxp_sar_adc_channels_disable(info, BIT(channel));
510 	nxp_sar_adc_irq_cfg(info, false);
511 	nxp_sar_adc_disable(info);
512 
513 	return ret;
514 }
515 
516 static int nxp_sar_adc_read_raw(struct iio_dev *indio_dev,
517 				struct iio_chan_spec const *chan, int *val,
518 				int *val2, long mask)
519 {
520 	struct nxp_sar_adc *info = iio_priv(indio_dev);
521 	u32 inpsamp;
522 	int ret;
523 
524 	switch (mask) {
525 	case IIO_CHAN_INFO_RAW:
526 		if (!iio_device_claim_direct(indio_dev))
527 			return -EBUSY;
528 
529 		ret = nxp_sar_adc_read_channel(info, chan->channel);
530 
531 		iio_device_release_direct(indio_dev);
532 
533 		if (ret)
534 			return ret;
535 
536 		*val = info->value;
537 		return IIO_VAL_INT;
538 
539 	case IIO_CHAN_INFO_SCALE:
540 		*val = info->vref_mV;
541 		*val2 = NXP_SAR_ADC_RESOLUTION;
542 		return IIO_VAL_FRACTIONAL_LOG2;
543 
544 	case IIO_CHAN_INFO_SAMP_FREQ:
545 		inpsamp = nxp_sar_adc_conversion_timing_get(info);
546 		*val = clk_get_rate(info->clk) / (inpsamp + NXP_SAR_ADC_CONV_TIME);
547 		return IIO_VAL_INT;
548 
549 	default:
550 		return -EINVAL;
551 	}
552 }
553 
554 static int nxp_sar_adc_write_raw(struct iio_dev *indio_dev, struct iio_chan_spec const *chan,
555 				 int val, int val2, long mask)
556 {
557 	struct nxp_sar_adc *info = iio_priv(indio_dev);
558 	u32 inpsamp;
559 
560 	switch (mask) {
561 	case IIO_CHAN_INFO_SAMP_FREQ:
562 		/*
563 		 * Configures the sample period duration in terms of the SAR
564 		 * controller clock. The minimum acceptable value is 8.
565 		 * Configuring it to a value lower than 8 sets the sample period
566 		 * to 8 cycles.  We read the clock value and divide by the
567 		 * sampling timing which gives us the number of cycles expected.
568 		 * The value is 8-bit wide, consequently the max value is 0xFF.
569 		 */
570 		inpsamp = clk_get_rate(info->clk) / val - NXP_SAR_ADC_CONV_TIME;
571 		nxp_sar_adc_conversion_timing_set(info, inpsamp);
572 		return 0;
573 
574 	default:
575 		return -EINVAL;
576 	}
577 }
578 
/*
 * Cyclic DMA completion callback: compute how far the DMA engine has
 * written into the ring from the transfer residue, drain complete
 * scans into info->buffer and push them to the IIO core. Runs with
 * info->lock held to protect the circular buffer indices.
 */
static void nxp_sar_adc_dma_cb(void *data)
{
	struct iio_dev *indio_dev = data;
	struct nxp_sar_adc *info = iio_priv(indio_dev);
	struct dma_tx_state state;
	struct circ_buf *dma_buf;
	struct device *dev_dma;
	u32 *dma_samples;
	s64 timestamp;
	int idx, ret;

	guard(spinlock_irqsave)(&info->lock);

	dma_buf = &info->dma_buf;
	dma_samples = (u32 *)dma_buf->buf;
	dev_dma = info->dma_chan->device->dev;

	/*
	 * DMA in some corner cases might have already be charged for
	 * the next transfer. Potentially there can be a race where
	 * the residue changes while the dma engine updates the
	 * buffer. That could be handled by using the
	 * callback_result() instead of callback() because the residue
	 * will be passed as a parameter to the function. However this
	 * new callback is pretty new and the backend does not update
	 * the residue. So let's stick to the version other drivers do
	 * which has proven running well in production since several
	 * years.
	 */
	dmaengine_tx_status(info->dma_chan, info->cookie, &state);

	dma_sync_single_for_cpu(dev_dma, info->rx_dma_buf,
				NXP_SAR_ADC_DMA_BUFF_SZ, DMA_FROM_DEVICE);

	/* Current head position. */
	dma_buf->head = (NXP_SAR_ADC_DMA_BUFF_SZ - state.residue) /
			NXP_SAR_ADC_DMA_SAMPLE_SZ;

	/* If everything was transferred, avoid an off by one error. */
	if (!state.residue)
		dma_buf->head--;

	/*
	 * Only drain if something was actually transferred: a residue
	 * equal to the full buffer size means nothing arrived.
	 */
	if (state.residue != NXP_SAR_ADC_DMA_BUFF_SZ) {
		/* Make sure that head is multiple of info->channels_used. */
		dma_buf->head -= dma_buf->head % info->channels_used;

		/*
		 * dma_buf->tail != dma_buf->head condition will become false
		 * because dma_buf->tail will be incremented with 1.
		 */
		while (dma_buf->tail != dma_buf->head) {
			/* idx is the sample's position within the current scan. */
			idx = dma_buf->tail % info->channels_used;
			info->buffer[idx] = dma_samples[dma_buf->tail];
			dma_buf->tail = (dma_buf->tail + 1) % NXP_SAR_ADC_DMA_SAMPLE_CNT;
			if (idx != info->channels_used - 1)
				continue;

			/*
			 * iio_push_to_buffers_with_ts() should not be
			 * called with dma_samples as parameter. The samples
			 * will be smashed if timestamp is enabled.
			 */
			timestamp = iio_get_time_ns(indio_dev);
			ret = iio_push_to_buffers_with_ts(indio_dev, info->buffer,
							  sizeof(info->buffer),
							  timestamp);
			if (ret < 0 && ret != -EBUSY)
				dev_err_ratelimited(&indio_dev->dev,
						    "failed to push iio buffer: %d",
						    ret);
		}

		dma_buf->tail = dma_buf->head;
	}

	dma_sync_single_for_device(dev_dma, info->rx_dma_buf,
				   NXP_SAR_ADC_DMA_BUFF_SZ, DMA_FROM_DEVICE);
}
658 
659 static int nxp_sar_adc_start_cyclic_dma(struct iio_dev *indio_dev)
660 {
661 	struct nxp_sar_adc *info = iio_priv(indio_dev);
662 	struct dma_slave_config config;
663 	struct dma_async_tx_descriptor *desc;
664 	int ret;
665 
666 	info->dma_buf.head = 0;
667 	info->dma_buf.tail = 0;
668 
669 	config.direction = DMA_DEV_TO_MEM;
670 	config.src_addr_width = NXP_SAR_ADC_DMA_SAMPLE_SZ;
671 	config.src_addr = NXP_SAR_ADC_CDR(info->regs_phys, info->buffered_chan[0]);
672 	config.src_port_window_size = info->channels_used;
673 	config.src_maxburst = info->channels_used;
674 	ret = dmaengine_slave_config(info->dma_chan, &config);
675 	if (ret < 0)
676 		return ret;
677 
678 	desc = dmaengine_prep_dma_cyclic(info->dma_chan,
679 					 info->rx_dma_buf,
680 					 NXP_SAR_ADC_DMA_BUFF_SZ,
681 					 NXP_SAR_ADC_DMA_BUFF_SZ / 2,
682 					 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
683 	if (!desc)
684 		return -EINVAL;
685 
686 	desc->callback = nxp_sar_adc_dma_cb;
687 	desc->callback_param = indio_dev;
688 	info->cookie = dmaengine_submit(desc);
689 	ret = dma_submit_error(info->cookie);
690 	if (ret) {
691 		dmaengine_terminate_async(info->dma_chan);
692 		return ret;
693 	}
694 
695 	dma_async_issue_pending(info->dma_chan);
696 
697 	return 0;
698 }
699 
/* Tear down DMA capture: stop conversions, cancel and release the channel. */
static void nxp_sar_adc_buffer_software_do_predisable(struct iio_dev *indio_dev)
{
	struct nxp_sar_adc *info = iio_priv(indio_dev);

	/*
	 * The ADC DMAEN bit should be cleared before DMA transaction
	 * is canceled.
	 */
	nxp_sar_adc_stop_conversion(info);
	dmaengine_terminate_sync(info->dma_chan);
	nxp_sar_adc_dma_cfg(info, false);
	nxp_sar_adc_dma_channels_disable(info, *indio_dev->active_scan_mask);

	dma_release_channel(info->dma_chan);
}
715 
716 static int nxp_sar_adc_buffer_software_do_postenable(struct iio_dev *indio_dev)
717 {
718 	struct nxp_sar_adc *info = iio_priv(indio_dev);
719 	int ret;
720 
721 	nxp_sar_adc_dma_channels_enable(info, *indio_dev->active_scan_mask);
722 
723 	nxp_sar_adc_dma_cfg(info, true);
724 
725 	ret = nxp_sar_adc_start_cyclic_dma(indio_dev);
726 	if (ret)
727 		goto out_dma_channels_disable;
728 
729 	ret = nxp_sar_adc_start_conversion(info, false);
730 	if (ret)
731 		goto out_stop_cyclic_dma;
732 
733 	return 0;
734 
735 out_stop_cyclic_dma:
736 	dmaengine_terminate_sync(info->dma_chan);
737 
738 out_dma_channels_disable:
739 	nxp_sar_adc_dma_cfg(info, false);
740 	nxp_sar_adc_dma_channels_disable(info, *indio_dev->active_scan_mask);
741 
742 	return ret;
743 }
744 
/* Triggered-mode teardown: just mask the end-of-chain interrupt. */
static void nxp_sar_adc_buffer_trigger_do_predisable(struct iio_dev *indio_dev)
{
	struct nxp_sar_adc *info = iio_priv(indio_dev);

	nxp_sar_adc_irq_cfg(info, false);
}
751 
/* Triggered-mode bring-up: just unmask the end-of-chain interrupt. */
static int nxp_sar_adc_buffer_trigger_do_postenable(struct iio_dev *indio_dev)
{
	struct nxp_sar_adc *info = iio_priv(indio_dev);

	nxp_sar_adc_irq_cfg(info, true);

	return 0;
}
760 
/*
 * Common buffer bring-up: enable the selected channels, power the ADC
 * up, then branch to the DMA (software) or triggered setup.
 */
static int nxp_sar_adc_buffer_postenable(struct iio_dev *indio_dev)
{
	struct nxp_sar_adc *info = iio_priv(indio_dev);
	int current_mode = iio_device_get_current_mode(indio_dev);
	unsigned long channel;
	int ret;

	/*
	 * NOTE(review): the DMA channel is requested unconditionally,
	 * including in triggered (non-software) mode, and the triggered
	 * predisable path does not release it — confirm this is intended.
	 */
	info->dma_chan = dma_request_chan(indio_dev->dev.parent, "rx");
	if (IS_ERR(info->dma_chan))
		return PTR_ERR(info->dma_chan);

	info->channels_used = 0;

	/*
	 * The SAR-ADC has two groups of channels.
	 *
	 *	- Group #0:
	 *	* bit 0-7  : channel 0 -> channel 7
	 *	* bit 8-31 : reserved
	 *
	 *	- Group #32:
	 *	* bit 0-7  : Internal
	 *	* bit 8-31 : reserved
	 *
	 * The 8 channels from group #0 are used in this driver for
	 * ADC as described when declaring the IIO device and the
	 * mapping is the same. That means the active_scan_mask can be
	 * used directly to write the channel interrupt mask.
	 */
	nxp_sar_adc_channels_enable(info, *indio_dev->active_scan_mask);

	/* Record the enabled channels in scan order for the ISR/DMA paths. */
	for_each_set_bit(channel, indio_dev->active_scan_mask, NXP_SAR_ADC_NR_CHANNELS)
		info->buffered_chan[info->channels_used++] = channel;

	nxp_sar_adc_enable(info);

	if (current_mode == INDIO_BUFFER_SOFTWARE)
		ret = nxp_sar_adc_buffer_software_do_postenable(indio_dev);
	else
		ret = nxp_sar_adc_buffer_trigger_do_postenable(indio_dev);
	if (ret)
		goto out_postenable;

	return 0;

out_postenable:
	nxp_sar_adc_disable(info);
	nxp_sar_adc_channels_disable(info, *indio_dev->active_scan_mask);

	return ret;
}
812 
/*
 * Common buffer teardown: mode-specific shutdown first (DMA for
 * software mode, IRQ masking for triggered mode), then power the ADC
 * down and clear the channel masks.
 */
static int nxp_sar_adc_buffer_predisable(struct iio_dev *indio_dev)
{
	struct nxp_sar_adc *info = iio_priv(indio_dev);
	int currentmode = iio_device_get_current_mode(indio_dev);

	if (currentmode == INDIO_BUFFER_SOFTWARE)
		nxp_sar_adc_buffer_software_do_predisable(indio_dev);
	else
		nxp_sar_adc_buffer_trigger_do_predisable(indio_dev);

	nxp_sar_adc_disable(info);

	nxp_sar_adc_channels_disable(info, *indio_dev->active_scan_mask);

	return 0;
}
829 
830 static irqreturn_t nxp_sar_adc_trigger_handler(int irq, void *p)
831 {
832 	struct iio_poll_func *pf = p;
833 	struct iio_dev *indio_dev = pf->indio_dev;
834 	struct nxp_sar_adc *info = iio_priv(indio_dev);
835 	int ret;
836 
837 	ret = nxp_sar_adc_start_conversion(info, true);
838 	if (ret < 0)
839 		dev_dbg(&indio_dev->dev, "Failed to start conversion\n");
840 
841 	return IRQ_HANDLED;
842 }
843 
/* Buffer setup hooks shared by triggered and DMA (software) modes. */
static const struct iio_buffer_setup_ops iio_triggered_buffer_setup_ops = {
	.postenable = nxp_sar_adc_buffer_postenable,
	.predisable = nxp_sar_adc_buffer_predisable,
};
848 
/* IIO callbacks: raw/scale/frequency reads and frequency writes. */
static const struct iio_info nxp_sar_adc_iio_info = {
	.read_raw  = nxp_sar_adc_read_raw,
	.write_raw = nxp_sar_adc_write_raw,
};
853 
854 static int nxp_sar_adc_dma_probe(struct device *dev, struct nxp_sar_adc *info)
855 {
856 	u8 *rx_buf;
857 
858 	rx_buf = dmam_alloc_coherent(dev, NXP_SAR_ADC_DMA_BUFF_SZ,
859 				     &info->rx_dma_buf, GFP_KERNEL);
860 	if (!rx_buf)
861 		return -ENOMEM;
862 
863 	info->dma_buf.buf = rx_buf;
864 
865 	return 0;
866 }
867 
868 /*
869  * The documentation describes the reset values for the registers.
870  * However some registers do not have these values after a reset. It
871  * is not a desirable situation. In some other SoC family
872  * documentation NXP recommends not assuming the default values are
873  * set and to initialize the registers conforming to the documentation
874  * reset information to prevent this situation. Assume the same rule
875  * applies here as there is a discrepancy between what is read from
876  * the registers at reset time and the documentation.
877  */
/* Program the documented reset defaults into the control registers. */
static void nxp_sar_adc_set_default_values(struct nxp_sar_adc *info)
{
	writel(0x00003901, NXP_SAR_ADC_MCR(info->regs));
	writel(0x00000001, NXP_SAR_ADC_MSR(info->regs));
	writel(0x00000014, NXP_SAR_ADC_CTR0(info->regs));
	writel(0x00000014, NXP_SAR_ADC_CTR1(info->regs));
	writel(0x00000000, NXP_SAR_ADC_CIMR0(info->regs));
	writel(0x00000000, NXP_SAR_ADC_CIMR1(info->regs));
	writel(0x00000000, NXP_SAR_ADC_NCMR0(info->regs));
	writel(0x00000000, NXP_SAR_ADC_NCMR1(info->regs));
}
889 
890 static int nxp_sar_adc_probe(struct platform_device *pdev)
891 {
892 	struct device *dev = &pdev->dev;
893 	const struct nxp_sar_adc_data *data = device_get_match_data(dev);
894 	struct nxp_sar_adc *info;
895 	struct iio_dev *indio_dev;
896 	struct resource *mem;
897 	int irq, ret;
898 
899 	indio_dev = devm_iio_device_alloc(dev, sizeof(*info));
900 	if (!indio_dev)
901 		return -ENOMEM;
902 
903 	info = iio_priv(indio_dev);
904 	info->vref_mV = data->vref_mV;
905 	spin_lock_init(&info->lock);
906 	info->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &mem);
907 	if (IS_ERR(info->regs))
908 		return dev_err_probe(dev, PTR_ERR(info->regs),
909 				     "Failed to get and remap resource");
910 
911 	info->regs_phys = mem->start;
912 
913 	irq = platform_get_irq(pdev, 0);
914 	if (irq < 0)
915 		return irq;
916 
917 	ret = devm_request_irq(dev, irq, nxp_sar_adc_isr, 0, dev_name(dev),
918 			       indio_dev);
919 	if (ret < 0)
920 		return ret;
921 
922 	info->clk = devm_clk_get_enabled(dev, NULL);
923 	if (IS_ERR(info->clk))
924 		return dev_err_probe(dev, PTR_ERR(info->clk),
925 				     "Failed to get the clock\n");
926 
927 	platform_set_drvdata(pdev, indio_dev);
928 
929 	init_completion(&info->completion);
930 
931 	indio_dev->name = data->model;
932 	indio_dev->info = &nxp_sar_adc_iio_info;
933 	indio_dev->modes = INDIO_DIRECT_MODE | INDIO_BUFFER_SOFTWARE;
934 	indio_dev->channels = nxp_sar_adc_iio_channels;
935 	indio_dev->num_channels = ARRAY_SIZE(nxp_sar_adc_iio_channels);
936 
937 	nxp_sar_adc_set_default_values(info);
938 
939 	ret = nxp_sar_adc_calibration(info);
940 	if (ret)
941 		dev_err_probe(dev, ret, "Calibration failed\n");
942 
943 	ret = nxp_sar_adc_dma_probe(dev, info);
944 	if (ret)
945 		return dev_err_probe(dev, ret, "Failed to initialize the DMA\n");
946 
947 	ret = devm_iio_triggered_buffer_setup(dev, indio_dev,
948 					      &iio_pollfunc_store_time,
949 					      &nxp_sar_adc_trigger_handler,
950 					      &iio_triggered_buffer_setup_ops);
951 	if (ret < 0)
952 		return dev_err_probe(dev, ret, "Couldn't initialise the buffer\n");
953 
954 	ret = devm_iio_device_register(dev, indio_dev);
955 	if (ret)
956 		return dev_err_probe(dev, ret, "Couldn't register the device\n");
957 
958 	return 0;
959 }
960 
961 static int nxp_sar_adc_suspend(struct device *dev)
962 {
963 	struct nxp_sar_adc *info = iio_priv(dev_get_drvdata(dev));
964 
965 	info->pwdn = nxp_sar_adc_disable(info);
966 	info->inpsamp = nxp_sar_adc_conversion_timing_get(info);
967 
968 	clk_disable_unprepare(info->clk);
969 
970 	return 0;
971 }
972 
973 static int nxp_sar_adc_resume(struct device *dev)
974 {
975 	struct nxp_sar_adc *info = iio_priv(dev_get_drvdata(dev));
976 	int ret;
977 
978 	ret = clk_prepare_enable(info->clk);
979 	if (ret)
980 		return ret;
981 
982 	nxp_sar_adc_conversion_timing_set(info, info->inpsamp);
983 
984 	if (!info->pwdn)
985 		nxp_sar_adc_enable(info);
986 
987 	return 0;
988 }
989 
/* System sleep PM hooks (no runtime PM). */
static DEFINE_SIMPLE_DEV_PM_OPS(nxp_sar_adc_pm_ops, nxp_sar_adc_suspend,
				nxp_sar_adc_resume);

/* S32G2 variant: 1.8 V reference. */
static const struct nxp_sar_adc_data s32g2_sar_adc_data = {
	.vref_mV = 1800,
	.model = "s32g2-sar-adc",
};

static const struct of_device_id nxp_sar_adc_match[] = {
	{ .compatible = "nxp,s32g2-sar-adc", .data = &s32g2_sar_adc_data },
	{ }
};
MODULE_DEVICE_TABLE(of, nxp_sar_adc_match);
1003 
/* Platform driver glue. */
static struct platform_driver nxp_sar_adc_driver = {
	.probe = nxp_sar_adc_probe,
	.driver = {
		.name = "nxp-sar-adc",
		.of_match_table = nxp_sar_adc_match,
		.pm = pm_sleep_ptr(&nxp_sar_adc_pm_ops),
	},
};
module_platform_driver(nxp_sar_adc_driver);

MODULE_AUTHOR("NXP");
MODULE_DESCRIPTION("NXP SAR-ADC driver");
MODULE_LICENSE("GPL");
1017