1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * This file is part of STM32 ADC driver
4 *
5 * Copyright (C) 2016, STMicroelectronics - All Rights Reserved
6 * Author: Fabrice Gasnier <fabrice.gasnier@st.com>.
7 */
8
9 #include <linux/array_size.h>
10 #include <linux/clk.h>
11 #include <linux/debugfs.h>
12 #include <linux/delay.h>
13 #include <linux/dma-mapping.h>
14 #include <linux/dmaengine.h>
15 #include <linux/iio/iio.h>
16 #include <linux/iio/buffer.h>
17 #include <linux/iio/timer/stm32-lptim-trigger.h>
18 #include <linux/iio/timer/stm32-timer-trigger.h>
19 #include <linux/iio/trigger.h>
20 #include <linux/iio/trigger_consumer.h>
21 #include <linux/iio/triggered_buffer.h>
22 #include <linux/interrupt.h>
23 #include <linux/io.h>
24 #include <linux/iopoll.h>
25 #include <linux/module.h>
26 #include <linux/mod_devicetable.h>
27 #include <linux/nvmem-consumer.h>
28 #include <linux/platform_device.h>
29 #include <linux/pm_runtime.h>
30 #include <linux/property.h>
31
32 #include "stm32-adc-core.h"
33
34 /* Number of linear calibration shadow registers / LINCALRDYW control bits */
35 #define STM32H7_LINCALFACT_NUM 6
36
37 /* BOOST bit must be set on STM32H7 when ADC clock is above 20MHz */
38 #define STM32H7_BOOST_CLKRATE 20000000UL
39
40 #define STM32_ADC_CH_MAX 20 /* max number of channels */
41 #define STM32_ADC_CH_SZ 16 /* max channel name size */
42 #define STM32_ADC_MAX_SQ 16 /* SQ1..SQ16 */
43 #define STM32_ADC_MAX_SMP 7 /* SMPx range is [0..7] */
44 #define STM32_ADC_TIMEOUT_US 100000
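/* 100 ms timeout for single conversion completion, expressed in jiffies */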
45 #define STM32_ADC_TIMEOUT (msecs_to_jiffies(STM32_ADC_TIMEOUT_US / 1000))
46 #define STM32_ADC_HW_STOP_DELAY_MS 100
47 #define STM32_ADC_VREFINT_VOLTAGE 3300 /* vrefint calibration reference voltage, in mV */
48
49 #define STM32_DMA_BUFFER_SIZE PAGE_SIZE
50
51 /* External trigger enable */
52 enum stm32_adc_exten {
53 STM32_EXTEN_SWTRIG,
54 STM32_EXTEN_HWTRIG_RISING_EDGE,
55 STM32_EXTEN_HWTRIG_FALLING_EDGE,
56 STM32_EXTEN_HWTRIG_BOTH_EDGES,
57 };
58
59 /* extsel - trigger mux selection value */
60 enum stm32_adc_extsel {
61 STM32_EXT0,
62 STM32_EXT1,
63 STM32_EXT2,
64 STM32_EXT3,
65 STM32_EXT4,
66 STM32_EXT5,
67 STM32_EXT6,
68 STM32_EXT7,
69 STM32_EXT8,
70 STM32_EXT9,
71 STM32_EXT10,
72 STM32_EXT11,
73 STM32_EXT12,
74 STM32_EXT13,
75 STM32_EXT14,
76 STM32_EXT15,
77 STM32_EXT16,
78 STM32_EXT17,
79 STM32_EXT18,
80 STM32_EXT19,
81 STM32_EXT20,
82 };
83
84 enum stm32_adc_int_ch {
85 STM32_ADC_INT_CH_NONE = -1,
86 STM32_ADC_INT_CH_VDDCORE,
87 STM32_ADC_INT_CH_VDDCPU,
88 STM32_ADC_INT_CH_VDDQ_DDR,
89 STM32_ADC_INT_CH_VREFINT,
90 STM32_ADC_INT_CH_VBAT,
91 STM32_ADC_INT_CH_NB,
92 };
93
94 /**
95 * struct stm32_adc_ic - ADC internal channels
96 * @name: name of the internal channel
97 * @idx: internal channel enum index
98 */
99 struct stm32_adc_ic {
100 const char *name;
101 u32 idx;
102 };
103
104 static const struct stm32_adc_ic stm32_adc_ic[STM32_ADC_INT_CH_NB] = {
105 { "vddcore", STM32_ADC_INT_CH_VDDCORE },
106 { "vddcpu", STM32_ADC_INT_CH_VDDCPU },
107 { "vddq_ddr", STM32_ADC_INT_CH_VDDQ_DDR },
108 { "vrefint", STM32_ADC_INT_CH_VREFINT },
109 { "vbat", STM32_ADC_INT_CH_VBAT },
110 };
111
112 /**
113 * struct stm32_adc_trig_info - ADC trigger info
114 * @name: name of the trigger, corresponding to its source
115 * @extsel: trigger selection
116 */
117 struct stm32_adc_trig_info {
118 const char *name;
119 enum stm32_adc_extsel extsel;
120 };
121
122 /**
123 * struct stm32_adc_calib - optional adc calibration data
124 * @lincalfact: Linearity calibration factor
125 * @lincal_saved: Indicates that linear calibration factors are saved
126 */
127 struct stm32_adc_calib {
128 u32 lincalfact[STM32H7_LINCALFACT_NUM];
129 bool lincal_saved;
130 };
131
132 /**
133 * struct stm32_adc_regs - stm32 ADC misc registers & bitfield desc
134 * @reg: register offset
135 * @mask: bitfield mask
136 * @shift: left shift
137 */
138 struct stm32_adc_regs {
139 int reg;
140 int mask;
141 int shift;
142 };
143
144 /**
145 * struct stm32_adc_vrefint - stm32 ADC internal reference voltage data
146 * @vrefint_cal: vrefint calibration value from nvmem
147 * @vrefint_data: vrefint actual value
148 */
149 struct stm32_adc_vrefint {
150 u32 vrefint_cal;
151 u32 vrefint_data;
152 };
153
154 /**
155 * struct stm32_adc_regspec - stm32 registers definition
156 * @dr: data register offset
157 * @ier_eoc: interrupt enable register & eocie bitfield
158 * @ier_ovr: interrupt enable register & overrun bitfield
159 * @isr_eoc: interrupt status register & eoc bitfield
160 * @isr_ovr: interrupt status register & overrun bitfield
161 * @sqr: reference to sequence registers array
162 * @exten: trigger control register & bitfield
163 * @extsel: trigger selection register & bitfield
164 * @res: resolution selection register & bitfield
165 * @difsel: differential mode selection register & bitfield
166 * @smpr: smpr1 & smpr2 registers offset array
167 * @smp_bits: smpr1 & smpr2 index and bitfields
168 * @or_vddcore: option register & vddcore bitfield
169 * @or_vddcpu: option register & vddcpu bitfield
170 * @or_vddq_ddr: option register & vddq_ddr bitfield
171 * @ccr_vbat: common register & vbat bitfield
172 * @ccr_vref: common register & vrefint bitfield
173 */
174 struct stm32_adc_regspec {
175 const u32 dr;
176 const struct stm32_adc_regs ier_eoc;
177 const struct stm32_adc_regs ier_ovr;
178 const struct stm32_adc_regs isr_eoc;
179 const struct stm32_adc_regs isr_ovr;
180 const struct stm32_adc_regs *sqr;
181 const struct stm32_adc_regs exten;
182 const struct stm32_adc_regs extsel;
183 const struct stm32_adc_regs res;
184 const struct stm32_adc_regs difsel;
185 const u32 smpr[2];
186 const struct stm32_adc_regs *smp_bits;
187 const struct stm32_adc_regs or_vddcore;
188 const struct stm32_adc_regs or_vddcpu;
189 const struct stm32_adc_regs or_vddq_ddr;
190 const struct stm32_adc_regs ccr_vbat;
191 const struct stm32_adc_regs ccr_vref;
192 };
193
194 struct stm32_adc;
195
196 /**
197 * struct stm32_adc_cfg - stm32 compatible configuration data
198 * @regs: registers descriptions
199 * @adc_info: per instance input channels definitions
200 * @trigs: external trigger sources
201 * @clk_required: clock is required
202 * @has_vregready: vregready status flag presence
203 * @has_boostmode: boost mode support flag
204 * @has_linearcal: linear calibration support flag
205 * @has_presel: channel preselection support flag
206 * @has_oversampling: oversampling support flag
207 * @prepare: optional prepare routine (power-up, enable)
208 * @start_conv: routine to start conversions
209 * @stop_conv: routine to stop conversions
210 * @unprepare: optional unprepare routine (disable, power-down)
211 * @irq_clear: routine to clear irqs
212 * @set_ovs: routine to set oversampling configuration
213 * @smp_cycles: programmable sampling time (ADC clock cycles)
214 * @ts_int_ch: pointer to array of internal channels minimum sampling time in ns
215 */
216 struct stm32_adc_cfg {
217 const struct stm32_adc_regspec *regs;
218 const struct stm32_adc_info *adc_info;
219 struct stm32_adc_trig_info *trigs;
220 bool clk_required;
221 bool has_vregready;
222 bool has_boostmode;
223 bool has_linearcal;
224 bool has_presel;
225 bool has_oversampling;
226 int (*prepare)(struct iio_dev *);
227 void (*start_conv)(struct iio_dev *, bool dma);
228 void (*stop_conv)(struct iio_dev *);
229 void (*unprepare)(struct iio_dev *);
230 void (*irq_clear)(struct iio_dev *indio_dev, u32 msk);
231 void (*set_ovs)(struct iio_dev *indio_dev, u32 ovs_idx);
232 const unsigned int *smp_cycles;
233 const unsigned int *ts_int_ch;
234 };
235
236 /**
237 * struct stm32_adc - private data of each ADC IIO instance
238 * @common: reference to ADC block common data
239 * @offset: ADC instance register offset in ADC block
240 * @cfg: compatible configuration data
241 * @completion: end of single conversion completion
242 * @buffer: data buffer + 8 bytes for timestamp if enabled
243 * @clk: clock for this adc instance
244 * @irq: interrupt for this adc instance
245 * @lock: spinlock
246 * @bufi: data buffer index
247 * @num_conv: expected number of scan conversions
248 * @res: data resolution (e.g. RES bitfield value)
249 * @trigger_polarity: external trigger polarity (e.g. exten)
250 * @dma_chan: dma channel
251 * @rx_buf: dma rx buffer cpu address
252 * @rx_dma_buf: dma rx buffer bus address
253 * @rx_buf_sz: dma rx buffer size
254 * @difsel: bitmask to set single-ended/differential channel
255 * @pcsel: bitmask to preselect channels on some devices
256 * @smpr_val: sampling time settings (e.g. smpr1 / smpr2)
257 * @cal: optional calibration data on some devices
258 * @vrefint: internal reference voltage data
259 * @chan_name: channel name array
260 * @num_diff: number of differential channels
261 * @int_ch: internal channel indexes array
262 * @nsmps: number of channels with optional sample time
263 * @ovs_idx: current oversampling ratio index (in oversampling array)
264 */
265 struct stm32_adc {
266 struct stm32_adc_common *common;
267 u32 offset;
268 const struct stm32_adc_cfg *cfg;
269 struct completion completion;
270 u16 buffer[STM32_ADC_MAX_SQ + 4] __aligned(8);
271 struct clk *clk;
272 int irq;
273 spinlock_t lock; /* interrupt lock */
274 unsigned int bufi;
275 unsigned int num_conv;
276 u32 res;
277 u32 trigger_polarity;
278 struct dma_chan *dma_chan;
279 u8 *rx_buf;
280 dma_addr_t rx_dma_buf;
281 unsigned int rx_buf_sz;
282 u32 difsel;
283 u32 pcsel;
284 u32 smpr_val[2];
285 struct stm32_adc_calib cal;
286 struct stm32_adc_vrefint vrefint;
287 char chan_name[STM32_ADC_CH_MAX][STM32_ADC_CH_SZ];
288 u32 num_diff;
289 int int_ch[STM32_ADC_INT_CH_NB];
290 int nsmps;
291 int ovs_idx;
292 };
293
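/**
 * struct stm32_adc_diff_channel - ADC differential channel description
 * @vinp: positive input channel number
 * @vinn: negative input channel number
 */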
294 struct stm32_adc_diff_channel {
295 u32 vinp;
296 u32 vinn;
297 };
298
299 /**
300 * struct stm32_adc_info - stm32 ADC, per instance config data
301 * @max_channels: Number of channels
302 * @resolutions: available resolutions
303 * @oversampling: available oversampling ratios
304 * @num_res: number of available resolutions
305 * @num_ovs: number of available oversampling ratios
306 */
307 struct stm32_adc_info {
308 int max_channels;
309 const unsigned int *resolutions;
310 const unsigned int *oversampling;
311 const unsigned int num_res;
312 const unsigned int num_ovs;
313 };
314
315 static const unsigned int stm32h7_adc_oversampling_avail[] = {
316 1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024,
317 };
318
319 static const unsigned int stm32mp13_adc_oversampling_avail[] = {
320 1, 2, 4, 8, 16, 32, 64, 128, 256,
321 };
322
323 static const unsigned int stm32f4_adc_resolutions[] = {
324 /* sorted values so the index matches RES[1:0] in STM32F4_ADC_CR1 */
325 12, 10, 8, 6,
326 };
327
328 /* stm32f4 can have up to 16 channels */
329 static const struct stm32_adc_info stm32f4_adc_info = {
330 .max_channels = 16,
331 .resolutions = stm32f4_adc_resolutions,
332 .num_res = ARRAY_SIZE(stm32f4_adc_resolutions),
333 };
334
335 static const unsigned int stm32h7_adc_resolutions[] = {
336 /* sorted values so the index matches RES[2:0] in STM32H7_ADC_CFGR */
337 16, 14, 12, 10, 8,
338 };
339
340 /* stm32h7 can have up to 20 channels */
341 static const struct stm32_adc_info stm32h7_adc_info = {
342 .max_channels = STM32_ADC_CH_MAX,
343 .resolutions = stm32h7_adc_resolutions,
344 .oversampling = stm32h7_adc_oversampling_avail,
345 .num_res = ARRAY_SIZE(stm32h7_adc_resolutions),
346 .num_ovs = ARRAY_SIZE(stm32h7_adc_oversampling_avail),
347 };
348
349 /* stm32mp13 can have up to 19 channels */
350 static const struct stm32_adc_info stm32mp13_adc_info = {
351 .max_channels = 19,
352 .resolutions = stm32f4_adc_resolutions,
353 .oversampling = stm32mp13_adc_oversampling_avail,
354 .num_res = ARRAY_SIZE(stm32f4_adc_resolutions),
355 .num_ovs = ARRAY_SIZE(stm32mp13_adc_oversampling_avail),
356 };
357
358 /*
359 * stm32f4_sq - describe regular sequence registers
360 * - L: sequence len (register & bit field)
361 * - SQ1..SQ16: sequence entries (register & bit field)
362 */
363 static const struct stm32_adc_regs stm32f4_sq[STM32_ADC_MAX_SQ + 1] = {
364 /* L: len bit field description to be kept as first element */
365 { STM32F4_ADC_SQR1, GENMASK(23, 20), 20 },
366 /* SQ1..SQ16 registers & bit fields (reg, mask, shift) */
367 { STM32F4_ADC_SQR3, GENMASK(4, 0), 0 },
368 { STM32F4_ADC_SQR3, GENMASK(9, 5), 5 },
369 { STM32F4_ADC_SQR3, GENMASK(14, 10), 10 },
370 { STM32F4_ADC_SQR3, GENMASK(19, 15), 15 },
371 { STM32F4_ADC_SQR3, GENMASK(24, 20), 20 },
372 { STM32F4_ADC_SQR3, GENMASK(29, 25), 25 },
373 { STM32F4_ADC_SQR2, GENMASK(4, 0), 0 },
374 { STM32F4_ADC_SQR2, GENMASK(9, 5), 5 },
375 { STM32F4_ADC_SQR2, GENMASK(14, 10), 10 },
376 { STM32F4_ADC_SQR2, GENMASK(19, 15), 15 },
377 { STM32F4_ADC_SQR2, GENMASK(24, 20), 20 },
378 { STM32F4_ADC_SQR2, GENMASK(29, 25), 25 },
379 { STM32F4_ADC_SQR1, GENMASK(4, 0), 0 },
380 { STM32F4_ADC_SQR1, GENMASK(9, 5), 5 },
381 { STM32F4_ADC_SQR1, GENMASK(14, 10), 10 },
382 { STM32F4_ADC_SQR1, GENMASK(19, 15), 15 },
383 };
384
385 /* STM32F4 external trigger sources for all instances */
386 static struct stm32_adc_trig_info stm32f4_adc_trigs[] = {
387 { TIM1_CH1, STM32_EXT0 },
388 { TIM1_CH2, STM32_EXT1 },
389 { TIM1_CH3, STM32_EXT2 },
390 { TIM2_CH2, STM32_EXT3 },
391 { TIM2_CH3, STM32_EXT4 },
392 { TIM2_CH4, STM32_EXT5 },
393 { TIM2_TRGO, STM32_EXT6 },
394 { TIM3_CH1, STM32_EXT7 },
395 { TIM3_TRGO, STM32_EXT8 },
396 { TIM4_CH4, STM32_EXT9 },
397 { TIM5_CH1, STM32_EXT10 },
398 { TIM5_CH2, STM32_EXT11 },
399 { TIM5_CH3, STM32_EXT12 },
400 { TIM8_CH1, STM32_EXT13 },
401 { TIM8_TRGO, STM32_EXT14 },
402 {}, /* sentinel */
403 };
404
405 /*
406 * stm32f4_smp_bits[] - describe sampling time register index & bit fields
407 * Sorted so it can be indexed by channel number.
408 */
409 static const struct stm32_adc_regs stm32f4_smp_bits[] = {
410 /* STM32F4_ADC_SMPR2: smpr[] index, mask, shift for SMP0 to SMP9 */
411 { 1, GENMASK(2, 0), 0 },
412 { 1, GENMASK(5, 3), 3 },
413 { 1, GENMASK(8, 6), 6 },
414 { 1, GENMASK(11, 9), 9 },
415 { 1, GENMASK(14, 12), 12 },
416 { 1, GENMASK(17, 15), 15 },
417 { 1, GENMASK(20, 18), 18 },
418 { 1, GENMASK(23, 21), 21 },
419 { 1, GENMASK(26, 24), 24 },
420 { 1, GENMASK(29, 27), 27 },
421 /* STM32F4_ADC_SMPR1, smpr[] index, mask, shift for SMP10 to SMP18 */
422 { 0, GENMASK(2, 0), 0 },
423 { 0, GENMASK(5, 3), 3 },
424 { 0, GENMASK(8, 6), 6 },
425 { 0, GENMASK(11, 9), 9 },
426 { 0, GENMASK(14, 12), 12 },
427 { 0, GENMASK(17, 15), 15 },
428 { 0, GENMASK(20, 18), 18 },
429 { 0, GENMASK(23, 21), 21 },
430 { 0, GENMASK(26, 24), 24 },
431 };
432
433 /* STM32F4 programmable sampling time (ADC clock cycles) */
434 static const unsigned int stm32f4_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = {
435 3, 15, 28, 56, 84, 112, 144, 480,
436 };
437
438 static const struct stm32_adc_regspec stm32f4_adc_regspec = {
439 .dr = STM32F4_ADC_DR,
440 .ier_eoc = { STM32F4_ADC_CR1, STM32F4_EOCIE },
441 .ier_ovr = { STM32F4_ADC_CR1, STM32F4_OVRIE },
442 .isr_eoc = { STM32F4_ADC_SR, STM32F4_EOC },
443 .isr_ovr = { STM32F4_ADC_SR, STM32F4_OVR },
444 .sqr = stm32f4_sq,
445 .exten = { STM32F4_ADC_CR2, STM32F4_EXTEN_MASK, STM32F4_EXTEN_SHIFT },
446 .extsel = { STM32F4_ADC_CR2, STM32F4_EXTSEL_MASK,
447 STM32F4_EXTSEL_SHIFT },
448 .res = { STM32F4_ADC_CR1, STM32F4_RES_MASK, STM32F4_RES_SHIFT },
449 .smpr = { STM32F4_ADC_SMPR1, STM32F4_ADC_SMPR2 },
450 .smp_bits = stm32f4_smp_bits,
451 };
452
453 static const struct stm32_adc_regs stm32h7_sq[STM32_ADC_MAX_SQ + 1] = {
454 /* L: len bit field description to be kept as first element */
455 { STM32H7_ADC_SQR1, GENMASK(3, 0), 0 },
456 /* SQ1..SQ16 registers & bit fields (reg, mask, shift) */
457 { STM32H7_ADC_SQR1, GENMASK(10, 6), 6 },
458 { STM32H7_ADC_SQR1, GENMASK(16, 12), 12 },
459 { STM32H7_ADC_SQR1, GENMASK(22, 18), 18 },
460 { STM32H7_ADC_SQR1, GENMASK(28, 24), 24 },
461 { STM32H7_ADC_SQR2, GENMASK(4, 0), 0 },
462 { STM32H7_ADC_SQR2, GENMASK(10, 6), 6 },
463 { STM32H7_ADC_SQR2, GENMASK(16, 12), 12 },
464 { STM32H7_ADC_SQR2, GENMASK(22, 18), 18 },
465 { STM32H7_ADC_SQR2, GENMASK(28, 24), 24 },
466 { STM32H7_ADC_SQR3, GENMASK(4, 0), 0 },
467 { STM32H7_ADC_SQR3, GENMASK(10, 6), 6 },
468 { STM32H7_ADC_SQR3, GENMASK(16, 12), 12 },
469 { STM32H7_ADC_SQR3, GENMASK(22, 18), 18 },
470 { STM32H7_ADC_SQR3, GENMASK(28, 24), 24 },
471 { STM32H7_ADC_SQR4, GENMASK(4, 0), 0 },
472 { STM32H7_ADC_SQR4, GENMASK(10, 6), 6 },
473 };
474
475 /* STM32H7 external trigger sources for all instances */
476 static struct stm32_adc_trig_info stm32h7_adc_trigs[] = {
477 { TIM1_CH1, STM32_EXT0 },
478 { TIM1_CH2, STM32_EXT1 },
479 { TIM1_CH3, STM32_EXT2 },
480 { TIM2_CH2, STM32_EXT3 },
481 { TIM3_TRGO, STM32_EXT4 },
482 { TIM4_CH4, STM32_EXT5 },
483 { TIM8_TRGO, STM32_EXT7 },
484 { TIM8_TRGO2, STM32_EXT8 },
485 { TIM1_TRGO, STM32_EXT9 },
486 { TIM1_TRGO2, STM32_EXT10 },
487 { TIM2_TRGO, STM32_EXT11 },
488 { TIM4_TRGO, STM32_EXT12 },
489 { TIM6_TRGO, STM32_EXT13 },
490 { TIM15_TRGO, STM32_EXT14 },
491 { TIM3_CH4, STM32_EXT15 },
492 { LPTIM1_OUT, STM32_EXT18 },
493 { LPTIM2_OUT, STM32_EXT19 },
494 { LPTIM3_OUT, STM32_EXT20 },
495 { }
496 };
497
498 /*
499 * stm32h7_smp_bits - describe sampling time register index & bit fields
500 * Sorted so it can be indexed by channel number.
501 */
502 static const struct stm32_adc_regs stm32h7_smp_bits[] = {
503 /* STM32H7_ADC_SMPR1, smpr[] index, mask, shift for SMP0 to SMP9 */
504 { 0, GENMASK(2, 0), 0 },
505 { 0, GENMASK(5, 3), 3 },
506 { 0, GENMASK(8, 6), 6 },
507 { 0, GENMASK(11, 9), 9 },
508 { 0, GENMASK(14, 12), 12 },
509 { 0, GENMASK(17, 15), 15 },
510 { 0, GENMASK(20, 18), 18 },
511 { 0, GENMASK(23, 21), 21 },
512 { 0, GENMASK(26, 24), 24 },
513 { 0, GENMASK(29, 27), 27 },
514 /* STM32H7_ADC_SMPR2, smpr[] index, mask, shift for SMP10 to SMP19 */
515 { 1, GENMASK(2, 0), 0 },
516 { 1, GENMASK(5, 3), 3 },
517 { 1, GENMASK(8, 6), 6 },
518 { 1, GENMASK(11, 9), 9 },
519 { 1, GENMASK(14, 12), 12 },
520 { 1, GENMASK(17, 15), 15 },
521 { 1, GENMASK(20, 18), 18 },
522 { 1, GENMASK(23, 21), 21 },
523 { 1, GENMASK(26, 24), 24 },
524 { 1, GENMASK(29, 27), 27 },
525 };
526
527 /* STM32H7 programmable sampling time (ADC clock cycles, rounded down) */
528 static const unsigned int stm32h7_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = {
529 1, 2, 8, 16, 32, 64, 387, 810,
530 };
531
532 static const struct stm32_adc_regspec stm32h7_adc_regspec = {
533 .dr = STM32H7_ADC_DR,
534 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE },
535 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE },
536 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC },
537 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR },
538 .sqr = stm32h7_sq,
539 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT },
540 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK,
541 STM32H7_EXTSEL_SHIFT },
542 .res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT },
543 .difsel = { STM32H7_ADC_DIFSEL, STM32H7_DIFSEL_MASK},
544 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 },
545 .smp_bits = stm32h7_smp_bits,
546 };
547
548 /* STM32MP13 programmable sampling time (ADC clock cycles, rounded down) */
549 static const unsigned int stm32mp13_adc_smp_cycles[STM32_ADC_MAX_SMP + 1] = {
550 2, 6, 12, 24, 47, 92, 247, 640,
551 };
552
553 static const struct stm32_adc_regspec stm32mp13_adc_regspec = {
554 .dr = STM32H7_ADC_DR,
555 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE },
556 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE },
557 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC },
558 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR },
559 .sqr = stm32h7_sq,
560 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT },
561 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK,
562 STM32H7_EXTSEL_SHIFT },
563 .res = { STM32H7_ADC_CFGR, STM32MP13_RES_MASK, STM32MP13_RES_SHIFT },
564 .difsel = { STM32MP13_ADC_DIFSEL, STM32MP13_DIFSEL_MASK},
565 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 },
566 .smp_bits = stm32h7_smp_bits,
567 .or_vddcore = { STM32MP13_ADC2_OR, STM32MP13_OP0 },
568 .or_vddcpu = { STM32MP13_ADC2_OR, STM32MP13_OP1 },
569 .or_vddq_ddr = { STM32MP13_ADC2_OR, STM32MP13_OP2 },
570 .ccr_vbat = { STM32H7_ADC_CCR, STM32H7_VBATEN },
571 .ccr_vref = { STM32H7_ADC_CCR, STM32H7_VREFEN },
572 };
573
574 static const struct stm32_adc_regspec stm32mp1_adc_regspec = {
575 .dr = STM32H7_ADC_DR,
576 .ier_eoc = { STM32H7_ADC_IER, STM32H7_EOCIE },
577 .ier_ovr = { STM32H7_ADC_IER, STM32H7_OVRIE },
578 .isr_eoc = { STM32H7_ADC_ISR, STM32H7_EOC },
579 .isr_ovr = { STM32H7_ADC_ISR, STM32H7_OVR },
580 .sqr = stm32h7_sq,
581 .exten = { STM32H7_ADC_CFGR, STM32H7_EXTEN_MASK, STM32H7_EXTEN_SHIFT },
582 .extsel = { STM32H7_ADC_CFGR, STM32H7_EXTSEL_MASK,
583 STM32H7_EXTSEL_SHIFT },
584 .res = { STM32H7_ADC_CFGR, STM32H7_RES_MASK, STM32H7_RES_SHIFT },
585 .difsel = { STM32H7_ADC_DIFSEL, STM32H7_DIFSEL_MASK},
586 .smpr = { STM32H7_ADC_SMPR1, STM32H7_ADC_SMPR2 },
587 .smp_bits = stm32h7_smp_bits,
588 .or_vddcore = { STM32MP1_ADC2_OR, STM32MP1_VDDCOREEN },
589 .ccr_vbat = { STM32H7_ADC_CCR, STM32H7_VBATEN },
590 .ccr_vref = { STM32H7_ADC_CCR, STM32H7_VREFEN },
591 };
592
593 /*
594 * STM32 ADC register access routines
595 * @adc: stm32 adc instance
596 * @reg: reg offset in adc instance
597 *
598 * Note: All instances share the same base address, with 0x0, 0x100 and 0x200 offsets resp.
599 * for adc1, adc2 and adc3.
600 */
601 static u32 stm32_adc_readl(struct stm32_adc *adc, u32 reg)
602 {
603 return readl_relaxed(adc->common->base + adc->offset + reg);
604 }
605
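/*
 * stm32_adc_readl_addr() adapts stm32_adc_readl() to the readx_poll_timeout()
 * calling convention: it relies on an 'adc' variable being in scope at the call site.
 */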
606 #define stm32_adc_readl_addr(addr) stm32_adc_readl(adc, addr)
607
608 #define stm32_adc_readl_poll_timeout(reg, val, cond, sleep_us, timeout_us) \
609 readx_poll_timeout(stm32_adc_readl_addr, reg, val, \
610 cond, sleep_us, timeout_us)
611
612 static u16 stm32_adc_readw(struct stm32_adc *adc, u32 reg)
613 {
614 return readw_relaxed(adc->common->base + adc->offset + reg);
615 }
616
617 static void stm32_adc_writel(struct stm32_adc *adc, u32 reg, u32 val)
618 {
619 writel_relaxed(val, adc->common->base + adc->offset + reg);
620 }
621
622 static void stm32_adc_set_bits(struct stm32_adc *adc, u32 reg, u32 bits)
623 {
624 unsigned long flags;
625
626 spin_lock_irqsave(&adc->lock, flags);
627 stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) | bits);
628 spin_unlock_irqrestore(&adc->lock, flags);
629 }
630
631 static void stm32_adc_set_bits_common(struct stm32_adc *adc, u32 reg, u32 bits)
632 {
633 spin_lock(&adc->common->lock);
634 writel_relaxed(readl_relaxed(adc->common->base + reg) | bits,
635 adc->common->base + reg);
636 spin_unlock(&adc->common->lock);
637 }
638
639 static void stm32_adc_clr_bits(struct stm32_adc *adc, u32 reg, u32 bits)
640 {
641 unsigned long flags;
642
643 spin_lock_irqsave(&adc->lock, flags);
644 stm32_adc_writel(adc, reg, stm32_adc_readl(adc, reg) & ~bits);
645 spin_unlock_irqrestore(&adc->lock, flags);
646 }
647
648 static void stm32_adc_clr_bits_common(struct stm32_adc *adc, u32 reg, u32 bits)
649 {
650 spin_lock(&adc->common->lock);
651 writel_relaxed(readl_relaxed(adc->common->base + reg) & ~bits,
652 adc->common->base + reg);
653 spin_unlock(&adc->common->lock);
654 }
655
656 /**
657 * stm32_adc_conv_irq_enable() - Enable end of conversion interrupt
658 * @adc: stm32 adc instance
659 */
660 static void stm32_adc_conv_irq_enable(struct stm32_adc *adc)
661 {
662 stm32_adc_set_bits(adc, adc->cfg->regs->ier_eoc.reg,
663 adc->cfg->regs->ier_eoc.mask);
664 }
665
666 /**
667 * stm32_adc_conv_irq_disable() - Disable end of conversion interrupt
668 * @adc: stm32 adc instance
669 */
670 static void stm32_adc_conv_irq_disable(struct stm32_adc *adc)
671 {
672 stm32_adc_clr_bits(adc, adc->cfg->regs->ier_eoc.reg,
673 adc->cfg->regs->ier_eoc.mask);
674 }
675
676 static void stm32_adc_ovr_irq_enable(struct stm32_adc *adc)
677 {
678 stm32_adc_set_bits(adc, adc->cfg->regs->ier_ovr.reg,
679 adc->cfg->regs->ier_ovr.mask);
680 }
681
682 static void stm32_adc_ovr_irq_disable(struct stm32_adc *adc)
683 {
684 stm32_adc_clr_bits(adc, adc->cfg->regs->ier_ovr.reg,
685 adc->cfg->regs->ier_ovr.mask);
686 }
687
688 static void stm32_adc_set_res(struct stm32_adc *adc)
689 {
690 const struct stm32_adc_regs *res = &adc->cfg->regs->res;
691 u32 val;
692
693 val = stm32_adc_readl(adc, res->reg);
694 val = (val & ~res->mask) | (adc->res << res->shift);
695 stm32_adc_writel(adc, res->reg, val);
696 }
697
698 static int stm32_adc_hw_stop(struct device *dev)
699 {
700 struct iio_dev *indio_dev = dev_get_drvdata(dev);
701 struct stm32_adc *adc = iio_priv(indio_dev);
702
703 if (adc->cfg->unprepare)
704 adc->cfg->unprepare(indio_dev);
705
706 clk_disable_unprepare(adc->clk);
707
708 return 0;
709 }
710
711 static int stm32_adc_hw_start(struct device *dev)
712 {
713 struct iio_dev *indio_dev = dev_get_drvdata(dev);
714 struct stm32_adc *adc = iio_priv(indio_dev);
715 int ret;
716
717 ret = clk_prepare_enable(adc->clk);
718 if (ret)
719 return ret;
720
721 stm32_adc_set_res(adc);
722
723 if (adc->cfg->prepare) {
724 ret = adc->cfg->prepare(indio_dev);
725 if (ret)
726 goto err_clk_dis;
727 }
728
729 return 0;
730
731 err_clk_dis:
732 clk_disable_unprepare(adc->clk);
733
734 return ret;
735 }
736
737 static void stm32_adc_int_ch_enable(struct iio_dev *indio_dev)
738 {
739 struct stm32_adc *adc = iio_priv(indio_dev);
740 u32 i;
741
742 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) {
743 if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE)
744 continue;
745
746 switch (i) {
747 case STM32_ADC_INT_CH_VDDCORE:
748 dev_dbg(&indio_dev->dev, "Enable VDDCore\n");
749 stm32_adc_set_bits(adc, adc->cfg->regs->or_vddcore.reg,
750 adc->cfg->regs->or_vddcore.mask);
751 break;
752 case STM32_ADC_INT_CH_VDDCPU:
753 dev_dbg(&indio_dev->dev, "Enable VDDCPU\n");
754 stm32_adc_set_bits(adc, adc->cfg->regs->or_vddcpu.reg,
755 adc->cfg->regs->or_vddcpu.mask);
756 break;
757 case STM32_ADC_INT_CH_VDDQ_DDR:
758 dev_dbg(&indio_dev->dev, "Enable VDDQ_DDR\n");
759 stm32_adc_set_bits(adc, adc->cfg->regs->or_vddq_ddr.reg,
760 adc->cfg->regs->or_vddq_ddr.mask);
761 break;
762 case STM32_ADC_INT_CH_VREFINT:
763 dev_dbg(&indio_dev->dev, "Enable VREFInt\n");
764 stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vref.reg,
765 adc->cfg->regs->ccr_vref.mask);
766 break;
767 case STM32_ADC_INT_CH_VBAT:
768 dev_dbg(&indio_dev->dev, "Enable VBAT\n");
769 stm32_adc_set_bits_common(adc, adc->cfg->regs->ccr_vbat.reg,
770 adc->cfg->regs->ccr_vbat.mask);
771 break;
772 }
773 }
774 }
775
776 static void stm32_adc_int_ch_disable(struct stm32_adc *adc)
777 {
778 u32 i;
779
780 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) {
781 if (adc->int_ch[i] == STM32_ADC_INT_CH_NONE)
782 continue;
783
784 switch (i) {
785 case STM32_ADC_INT_CH_VDDCORE:
786 stm32_adc_clr_bits(adc, adc->cfg->regs->or_vddcore.reg,
787 adc->cfg->regs->or_vddcore.mask);
788 break;
789 case STM32_ADC_INT_CH_VDDCPU:
790 stm32_adc_clr_bits(adc, adc->cfg->regs->or_vddcpu.reg,
791 adc->cfg->regs->or_vddcpu.mask);
792 break;
793 case STM32_ADC_INT_CH_VDDQ_DDR:
794 stm32_adc_clr_bits(adc, adc->cfg->regs->or_vddq_ddr.reg,
795 adc->cfg->regs->or_vddq_ddr.mask);
796 break;
797 case STM32_ADC_INT_CH_VREFINT:
798 stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vref.reg,
799 adc->cfg->regs->ccr_vref.mask);
800 break;
801 case STM32_ADC_INT_CH_VBAT:
802 stm32_adc_clr_bits_common(adc, adc->cfg->regs->ccr_vbat.reg,
803 adc->cfg->regs->ccr_vbat.mask);
804 break;
805 }
806 }
807 }
808
809 /**
810 * stm32f4_adc_start_conv() - Start conversions for regular channels.
811 * @indio_dev: IIO device instance
812 * @dma: use dma to transfer conversion result
813 *
814 * Start conversions for regular channels.
815 * Also take care of normal or DMA mode. Circular DMA may be used for regular
816 * conversions, in IIO buffer modes. Otherwise, use ADC interrupt with direct
817 * DR read instead (e.g. read_raw, or triggered buffer mode without DMA).
818 */
819 static void stm32f4_adc_start_conv(struct iio_dev *indio_dev, bool dma)
820 {
821 struct stm32_adc *adc = iio_priv(indio_dev);
822
823 stm32_adc_set_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN);
824
825 if (dma)
826 stm32_adc_set_bits(adc, STM32F4_ADC_CR2,
827 STM32F4_DMA | STM32F4_DDS);
828
829 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_EOCS | STM32F4_ADON);
830
831 /* Wait for Power-up time (tSTAB from datasheet) */
832 usleep_range(2, 3);
833
834 /* Software start? (e.g. trigger detection disabled?) */
835 if (!(stm32_adc_readl(adc, STM32F4_ADC_CR2) & STM32F4_EXTEN_MASK))
836 stm32_adc_set_bits(adc, STM32F4_ADC_CR2, STM32F4_SWSTART);
837 }
838
839 static void stm32f4_adc_stop_conv(struct iio_dev *indio_dev)
840 {
841 struct stm32_adc *adc = iio_priv(indio_dev);
842
843 stm32_adc_clr_bits(adc, STM32F4_ADC_CR2, STM32F4_EXTEN_MASK);
844 stm32_adc_clr_bits(adc, STM32F4_ADC_SR, STM32F4_STRT);
845
846 stm32_adc_clr_bits(adc, STM32F4_ADC_CR1, STM32F4_SCAN);
847 stm32_adc_clr_bits(adc, STM32F4_ADC_CR2,
848 STM32F4_ADON | STM32F4_DMA | STM32F4_DDS);
849 }
850
851 static void stm32f4_adc_irq_clear(struct iio_dev *indio_dev, u32 msk)
852 {
853 struct stm32_adc *adc = iio_priv(indio_dev);
854
855 stm32_adc_clr_bits(adc, adc->cfg->regs->isr_eoc.reg, msk);
856 }
857
858 static void stm32h7_adc_start_conv(struct iio_dev *indio_dev, bool dma)
859 {
860 struct stm32_adc *adc = iio_priv(indio_dev);
861 enum stm32h7_adc_dmngt dmngt;
862 unsigned long flags;
863 u32 val;
864
865 if (dma)
866 dmngt = STM32H7_DMNGT_DMA_CIRC;
867 else
868 dmngt = STM32H7_DMNGT_DR_ONLY;
869
870 spin_lock_irqsave(&adc->lock, flags);
871 val = stm32_adc_readl(adc, STM32H7_ADC_CFGR);
872 val = (val & ~STM32H7_DMNGT_MASK) | (dmngt << STM32H7_DMNGT_SHIFT);
873 stm32_adc_writel(adc, STM32H7_ADC_CFGR, val);
874 spin_unlock_irqrestore(&adc->lock, flags);
875
876 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTART);
877 }
878
879 static void stm32h7_adc_stop_conv(struct iio_dev *indio_dev)
880 {
881 struct stm32_adc *adc = iio_priv(indio_dev);
882 int ret;
883 u32 val;
884
885 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTP);
886
887 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
888 !(val & (STM32H7_ADSTART)),
889 100, STM32_ADC_TIMEOUT_US);
890 if (ret)
891 dev_warn(&indio_dev->dev, "stop failed\n");
892
893 /* STM32H7_DMNGT_MASK covers STM32MP13_DMAEN & STM32MP13_DMACFG */
894 stm32_adc_clr_bits(adc, STM32H7_ADC_CFGR, STM32H7_DMNGT_MASK);
895 }
896
897 static void stm32h7_adc_irq_clear(struct iio_dev *indio_dev, u32 msk)
898 {
899 struct stm32_adc *adc = iio_priv(indio_dev);
900 /* On STM32H7 IRQs are cleared by writing 1 into ISR register */
901 stm32_adc_set_bits(adc, adc->cfg->regs->isr_eoc.reg, msk);
902 }
903
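/*
 * Start regular conversions on STM32MP13: optionally enable DMA requests
 * (circular mode) for buffer transfers, then set ADSTART.
 */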
904 static void stm32mp13_adc_start_conv(struct iio_dev *indio_dev, bool dma)
905 {
906 struct stm32_adc *adc = iio_priv(indio_dev);
907
908 if (dma)
909 stm32_adc_set_bits(adc, STM32H7_ADC_CFGR,
910 STM32MP13_DMAEN | STM32MP13_DMACFG);
911
912 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADSTART);
913 }
914
915 static void stm32h7_adc_set_ovs(struct iio_dev *indio_dev, u32 ovs_idx)
916 {
917 struct stm32_adc *adc = iio_priv(indio_dev);
918 u32 ovsr_bits, bits, msk;
919
920 msk = STM32H7_ROVSE | STM32H7_OVSR_MASK | STM32H7_OVSS_MASK;
921 stm32_adc_clr_bits(adc, STM32H7_ADC_CFGR2, msk);
922
923 if (!ovs_idx)
924 return;
925
926 /*
927 * Only the oversampling ratios corresponding to 2^ovs_idx are exposed in sysfs.
928 * Oversampling ratios [2,3,...,1024] are mapped onto OVSR register values [1,2,...,1023].
929 * OVSR = 2^ovs_idx - 1
930 * These ratios increase the resolution by ovs_idx bits. Apply a right shift to keep the
931 * initial resolution given by the "assigned-resolution-bits" property.
932 * OVSS = ovs_idx
933 */
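/* e.g. ovs_idx = 4: ratio = 16, OVSR = 0xf (15), OVSS = 4, data shifted right by 4 bits */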
934 ovsr_bits = GENMASK(ovs_idx - 1, 0);
935 bits = STM32H7_ROVSE | STM32H7_OVSS(ovs_idx) | STM32H7_OVSR(ovsr_bits);
936
937 stm32_adc_set_bits(adc, STM32H7_ADC_CFGR2, bits & msk);
938 }
939
940 static void stm32mp13_adc_set_ovs(struct iio_dev *indio_dev, u32 ovs_idx)
941 {
942 struct stm32_adc *adc = iio_priv(indio_dev);
943 u32 bits, msk;
944
945 msk = STM32H7_ROVSE | STM32MP13_OVSR_MASK | STM32MP13_OVSS_MASK;
946 stm32_adc_clr_bits(adc, STM32H7_ADC_CFGR2, msk);
947
948 if (!ovs_idx)
949 return;
950
951 /*
952 * The oversampling ratios [2,4,8,..,256] are mapped onto OVSR register values [0,1,...,7].
953 * OVSR = ovs_idx - 1
954 * These ratios increase the resolution by ovs_idx bits. Apply a right shift to keep the
955 * initial resolution given by the "assigned-resolution-bits" property.
956 * OVSS = ovs_idx
957 */
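/* e.g. ovs_idx = 3: ratio = 8, OVSR = 2, OVSS = 3, data shifted right by 3 bits */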
958 bits = STM32H7_ROVSE | STM32MP13_OVSS(ovs_idx);
959 if (ovs_idx - 1)
960 bits |= STM32MP13_OVSR(ovs_idx - 1);
961
962 stm32_adc_set_bits(adc, STM32H7_ADC_CFGR2, bits & msk);
963 }
964
965 static int stm32h7_adc_exit_pwr_down(struct iio_dev *indio_dev)
966 {
967 struct stm32_adc *adc = iio_priv(indio_dev);
968 int ret;
969 u32 val;
970
971 /* Exit deep power down, then enable ADC voltage regulator */
972 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD);
973 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADVREGEN);
974
975 if (adc->cfg->has_boostmode &&
976 adc->common->rate > STM32H7_BOOST_CLKRATE)
977 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST);
978
979 /* Wait for startup time */
980 if (!adc->cfg->has_vregready) {
981 usleep_range(10, 20);
982 return 0;
983 }
984
985 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val,
986 val & STM32MP1_VREGREADY, 100,
987 STM32_ADC_TIMEOUT_US);
988 if (ret) {
989 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD);
990 dev_err(&indio_dev->dev, "Failed to exit power down\n");
991 }
992
993 return ret;
994 }
995
996 static void stm32h7_adc_enter_pwr_down(struct stm32_adc *adc)
997 {
998 if (adc->cfg->has_boostmode)
999 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, STM32H7_BOOST);
1000
1001 /* Setting DEEPPWD disables ADC vreg and clears ADVREGEN */
1002 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_DEEPPWD);
1003 }
1004
1005 static int stm32h7_adc_enable(struct iio_dev *indio_dev)
1006 {
1007 struct stm32_adc *adc = iio_priv(indio_dev);
1008 int ret;
1009 u32 val;
1010
1011 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADEN);
1012
1013 /* Poll for ADRDY to be set (after adc startup time) */
1014 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_ISR, val,
1015 val & STM32H7_ADRDY,
1016 100, STM32_ADC_TIMEOUT_US);
1017 if (ret) {
1018 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS);
1019 dev_err(&indio_dev->dev, "Failed to enable ADC\n");
1020 } else {
1021 /* Clear ADRDY by writing one */
1022 stm32_adc_set_bits(adc, STM32H7_ADC_ISR, STM32H7_ADRDY);
1023 }
1024
1025 return ret;
1026 }
1027
1028 static void stm32h7_adc_disable(struct iio_dev *indio_dev)
1029 {
1030 struct stm32_adc *adc = iio_priv(indio_dev);
1031 int ret;
1032 u32 val;
1033
1034 if (!(stm32_adc_readl(adc, STM32H7_ADC_CR) & STM32H7_ADEN))
1035 return;
1036
1037 /* Disable ADC and wait until it's effectively disabled */
1038 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADDIS);
1039 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
1040 !(val & STM32H7_ADEN), 100,
1041 STM32_ADC_TIMEOUT_US);
1042 if (ret)
1043 dev_warn(&indio_dev->dev, "Failed to disable\n");
1044 }
1045
1046 /**
1047 * stm32h7_adc_read_selfcalib() - read calibration shadow regs, save result
1048 * @indio_dev: IIO device instance
1049 * Note: Must be called once ADC is enabled, so LINCALRDYW[1..6] are writable
1050 */
1051 static int stm32h7_adc_read_selfcalib(struct iio_dev *indio_dev)
1052 {
1053 struct stm32_adc *adc = iio_priv(indio_dev);
1054 int i, ret;
1055 u32 lincalrdyw_mask, val;
1056
1057 /* Read linearity calibration */
1058 lincalrdyw_mask = STM32H7_LINCALRDYW6;
1059 for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) {
1060 /* Clear STM32H7_LINCALRDYW[6..1]: transfer calib to CALFACT2 */
1061 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask);
1062
1063 /* Poll: wait calib data to be ready in CALFACT2 register */
1064 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
1065 !(val & lincalrdyw_mask),
1066 100, STM32_ADC_TIMEOUT_US);
1067 if (ret) {
1068 dev_err(&indio_dev->dev, "Failed to read calfact\n");
1069 return ret;
1070 }
1071
1072 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2);
1073 adc->cal.lincalfact[i] = (val & STM32H7_LINCALFACT_MASK);
1074 adc->cal.lincalfact[i] >>= STM32H7_LINCALFACT_SHIFT;
1075
1076 lincalrdyw_mask >>= 1;
1077 }
1078 adc->cal.lincal_saved = true;
1079
1080 return 0;
1081 }
1082
1083 /**
1084 * stm32h7_adc_restore_selfcalib() - Restore saved self-calibration result
1085 * @indio_dev: IIO device instance
1086 * Note: ADC must be enabled, with no on-going conversions.
1087 */
1088 static int stm32h7_adc_restore_selfcalib(struct iio_dev *indio_dev)
1089 {
1090 struct stm32_adc *adc = iio_priv(indio_dev);
1091 int i, ret;
1092 u32 lincalrdyw_mask, val;
1093
1094 lincalrdyw_mask = STM32H7_LINCALRDYW6;
1095 for (i = STM32H7_LINCALFACT_NUM - 1; i >= 0; i--) {
1096 /*
1097 * Write saved calibration data to shadow registers:
1098 * Write CALFACT2, and set LINCALRDYW[6..1] bit to trigger
1099 * data write. Then poll to wait for complete transfer.
1100 */
1101 val = adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT;
1102 stm32_adc_writel(adc, STM32H7_ADC_CALFACT2, val);
1103 stm32_adc_set_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask);
1104 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
1105 val & lincalrdyw_mask,
1106 100, STM32_ADC_TIMEOUT_US);
1107 if (ret) {
1108 dev_err(&indio_dev->dev, "Failed to write calfact\n");
1109 return ret;
1110 }
1111
1112 /*
1113 * Reading back the calibration data has two effects:
1114 * - It ensures bits LINCALRDYW[6..1] are kept cleared
1115 * for next time calibration needs to be restored.
1116 * - Clearing the bit also triggers a read back, so the data can be
1117 * checked against what was written.
1118 */
1119 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, lincalrdyw_mask);
1120 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
1121 !(val & lincalrdyw_mask),
1122 100, STM32_ADC_TIMEOUT_US);
1123 if (ret) {
1124 dev_err(&indio_dev->dev, "Failed to read calfact\n");
1125 return ret;
1126 }
1127 val = stm32_adc_readl(adc, STM32H7_ADC_CALFACT2);
1128 if (val != adc->cal.lincalfact[i] << STM32H7_LINCALFACT_SHIFT) {
1129 dev_err(&indio_dev->dev, "calfact not consistent\n");
1130 return -EIO;
1131 }
1132
1133 lincalrdyw_mask >>= 1;
1134 }
1135
1136 return 0;
1137 }
1138
1139 /*
1140 * Fixed timeout value for ADC calibration.
1141 * worst cases:
1142 * - low clock frequency
1143 * - maximum prescalers
1144 * Calibration requires:
1145 * - 131,072 ADC clock cycle for the linear calibration
1146 * - 20 ADC clock cycle for the offset calibration
1147 *
1148 * Set to 100ms for now
1149 */
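/* e.g. ~131 k cycles at a 2 MHz ADC clock take about 66 ms, so 100 ms leaves margin */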
1150 #define STM32H7_ADC_CALIB_TIMEOUT_US 100000
1151
1152 /**
1153 * stm32h7_adc_selfcalib() - Procedure to calibrate ADC
1154 * @indio_dev: IIO device instance
1155 * @do_lincal: linear calibration request flag
1156 * Note: Must be called once ADC is out of power down.
1157 *
1158 * Run offset calibration unconditionally.
1159 * Run linear calibration if requested & supported.
1160 */
1161 static int stm32h7_adc_selfcalib(struct iio_dev *indio_dev, int do_lincal)
1162 {
1163 struct stm32_adc *adc = iio_priv(indio_dev);
1164 int ret;
1165 u32 msk = STM32H7_ADCALDIF;
1166 u32 val;
1167
1168 if (adc->cfg->has_linearcal && do_lincal)
1169 msk |= STM32H7_ADCALLIN;
1170 /* ADC must be disabled for calibration */
1171 stm32h7_adc_disable(indio_dev);
1172
1173 /*
1174 * Select calibration mode:
1175 * - Offset calibration for single ended inputs
1176 * - No linearity calibration (do it later, before reading it)
1177 */
1178 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, msk);
1179
1180 /* Start calibration, then wait for completion */
1181 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL);
1182 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
1183 !(val & STM32H7_ADCAL), 100,
1184 STM32H7_ADC_CALIB_TIMEOUT_US);
1185 if (ret) {
1186 dev_err(&indio_dev->dev, "calibration (single-ended) error %d\n", ret);
1187 goto out;
1188 }
1189
1190 /*
1191 * Select calibration mode, then start calibration:
1192 * - Offset calibration for differential input
1193 * - Linearity calibration (needs to be done only once for single/diff)
1194 * will run simultaneously with offset calibration.
1195 */
1196 stm32_adc_set_bits(adc, STM32H7_ADC_CR, msk);
1197 stm32_adc_set_bits(adc, STM32H7_ADC_CR, STM32H7_ADCAL);
1198 ret = stm32_adc_readl_poll_timeout(STM32H7_ADC_CR, val,
1199 !(val & STM32H7_ADCAL), 100,
1200 STM32H7_ADC_CALIB_TIMEOUT_US);
1201 if (ret) {
1202 dev_err(&indio_dev->dev, "calibration (diff%s) error %d\n",
1203 (msk & STM32H7_ADCALLIN) ? "+linear" : "", ret);
1204 goto out;
1205 }
1206
1207 out:
1208 stm32_adc_clr_bits(adc, STM32H7_ADC_CR, msk);
1209
1210 return ret;
1211 }
1212
1213 /**
1214 * stm32h7_adc_check_selfcalib() - Check linear calibration status
1215 * @indio_dev: IIO device instance
1216 *
1217 * Used to check if linear calibration has been done.
1218 * Return true if linear calibration factors are already saved in private data
1219 * or if a linear calibration has been done at boot stage.
1220 */
1221 static int stm32h7_adc_check_selfcalib(struct iio_dev *indio_dev)
1222 {
1223 struct stm32_adc *adc = iio_priv(indio_dev);
1224 u32 val;
1225
1226 if (adc->cal.lincal_saved)
1227 return true;
1228
1229 /*
1230 * Check if linear calibration factors are available in ADC registers,
1231 * by checking that all LINCALRDYWx bits are set.
1232 */
1233 val = stm32_adc_readl(adc, STM32H7_ADC_CR) & STM32H7_LINCALRDYW_MASK;
1234 if (val == STM32H7_LINCALRDYW_MASK)
1235 return true;
1236
1237 return false;
1238 }
1239
1240 /**
1241 * stm32h7_adc_prepare() - Leave power down mode to enable ADC.
1242 * @indio_dev: IIO device instance
1243 * Leave power down mode.
1244 * Configure channels as single ended or differential before enabling ADC.
1245 * Enable ADC.
1246 * Restore calibration data.
1247 * Pre-select channels that may be used in PCSEL (required by input MUX / IO):
1248 * - Only one input is selected for single ended (e.g. 'vinp')
1249 * - Two inputs are selected for differential channels (e.g. 'vinp' & 'vinn')
1250 */
1251 static int stm32h7_adc_prepare(struct iio_dev *indio_dev)
1252 {
1253 struct stm32_adc *adc = iio_priv(indio_dev);
1254 int lincal_done = false;
1255 int ret;
1256
1257 ret = stm32h7_adc_exit_pwr_down(indio_dev);
1258 if (ret)
1259 return ret;
1260
1261 if (adc->cfg->has_linearcal)
1262 lincal_done = stm32h7_adc_check_selfcalib(indio_dev);
1263
1264 /* Always run offset calibration. Run linear calibration only once */
1265 ret = stm32h7_adc_selfcalib(indio_dev, !lincal_done);
1266 if (ret < 0)
1267 goto pwr_dwn;
1268
1269 stm32_adc_int_ch_enable(indio_dev);
1270
1271 stm32_adc_writel(adc, adc->cfg->regs->difsel.reg, adc->difsel);
1272
1273 ret = stm32h7_adc_enable(indio_dev);
1274 if (ret)
1275 goto ch_disable;
1276
1277 if (adc->cfg->has_linearcal) {
1278 if (!adc->cal.lincal_saved)
1279 ret = stm32h7_adc_read_selfcalib(indio_dev);
1280 else
1281 ret = stm32h7_adc_restore_selfcalib(indio_dev);
1282
1283 if (ret)
1284 goto disable;
1285 }
1286
1287 if (adc->cfg->has_presel)
1288 stm32_adc_writel(adc, STM32H7_ADC_PCSEL, adc->pcsel);
1289
1290 return 0;
1291
1292 disable:
1293 stm32h7_adc_disable(indio_dev);
1294 ch_disable:
1295 stm32_adc_int_ch_disable(adc);
1296 pwr_dwn:
1297 stm32h7_adc_enter_pwr_down(adc);
1298
1299 return ret;
1300 }
1301
1302 static void stm32h7_adc_unprepare(struct iio_dev *indio_dev)
1303 {
1304 struct stm32_adc *adc = iio_priv(indio_dev);
1305
1306 if (adc->cfg->has_presel)
1307 stm32_adc_writel(adc, STM32H7_ADC_PCSEL, 0);
1308 stm32h7_adc_disable(indio_dev);
1309 stm32_adc_int_ch_disable(adc);
1310 stm32h7_adc_enter_pwr_down(adc);
1311 }
1312
1313 /**
1314 * stm32_adc_conf_scan_seq() - Build regular channels scan sequence
1315 * @indio_dev: IIO device
1316 * @scan_mask: channels to be converted
1317 *
1318 * Conversion sequence :
1319 * Apply sampling time settings for all channels.
1320 * Configure ADC scan sequence based on selected channels in scan_mask.
1321 * Add channels to SQR registers, from scan_mask LSB to MSB, then
1322 * program sequence len.
1323 */
1324 static int stm32_adc_conf_scan_seq(struct iio_dev *indio_dev,
1325 const unsigned long *scan_mask)
1326 {
1327 struct stm32_adc *adc = iio_priv(indio_dev);
1328 const struct stm32_adc_regs *sqr = adc->cfg->regs->sqr;
1329 const struct iio_chan_spec *chan;
1330 u32 val, bit;
1331 int i = 0;
1332
1333 /* Apply sampling time settings */
1334 stm32_adc_writel(adc, adc->cfg->regs->smpr[0], adc->smpr_val[0]);
1335 stm32_adc_writel(adc, adc->cfg->regs->smpr[1], adc->smpr_val[1]);
1336
1337 for_each_set_bit(bit, scan_mask, iio_get_masklength(indio_dev)) {
1338 chan = indio_dev->channels + bit;
1339 /*
1340 * Assign one channel per SQ entry in regular
1341 * sequence, starting with SQ1.
1342 */
1343 i++;
1344 if (i > STM32_ADC_MAX_SQ)
1345 return -EINVAL;
1346
1347 dev_dbg(&indio_dev->dev, "%s chan %d to SQ%d\n",
1348 __func__, chan->channel, i);
1349
1350 val = stm32_adc_readl(adc, sqr[i].reg);
1351 val &= ~sqr[i].mask;
1352 val |= chan->channel << sqr[i].shift;
1353 stm32_adc_writel(adc, sqr[i].reg, val);
1354 }
1355
1356 if (!i)
1357 return -EINVAL;
1358
1359 /* Sequence len: L field encodes (number of conversions - 1) */
1360 val = stm32_adc_readl(adc, sqr[0].reg);
1361 val &= ~sqr[0].mask;
1362 val |= ((i - 1) << sqr[0].shift);
1363 stm32_adc_writel(adc, sqr[0].reg, val);
1364
1365 return 0;
1366 }
1367
1368 /**
1369 * stm32_adc_get_trig_extsel() - Get external trigger selection
1370 * @indio_dev: IIO device structure
1371 * @trig: trigger
1372 *
1373 * Returns trigger extsel value, if trig matches, -EINVAL otherwise.
1374 */
1375 static int stm32_adc_get_trig_extsel(struct iio_dev *indio_dev,
1376 struct iio_trigger *trig)
1377 {
1378 struct stm32_adc *adc = iio_priv(indio_dev);
1379 int i;
1380
1381 /* lookup triggers registered by stm32 timer trigger driver */
1382 for (i = 0; adc->cfg->trigs[i].name; i++) {
1383 /*
1384 * Checking both stm32 timer trigger type and trig name
1385 * should be safe against arbitrary trigger names.
1386 */
1387 if ((is_stm32_timer_trigger(trig) ||
1388 is_stm32_lptim_trigger(trig)) &&
1389 !strcmp(adc->cfg->trigs[i].name, trig->name)) {
1390 return adc->cfg->trigs[i].extsel;
1391 }
1392 }
1393
1394 return -EINVAL;
1395 }
1396
1397 /**
1398 * stm32_adc_set_trig() - Set a regular trigger
1399 * @indio_dev: IIO device
1400 * @trig: IIO trigger
1401 *
1402 * Set trigger source/polarity (e.g. SW, or HW with polarity) :
1403 * - if HW trigger disabled (e.g. trig == NULL, conversion launched by sw)
1404 * - if HW trigger enabled, set source & polarity
1405 */
1406 static int stm32_adc_set_trig(struct iio_dev *indio_dev,
1407 struct iio_trigger *trig)
1408 {
1409 struct stm32_adc *adc = iio_priv(indio_dev);
1410 u32 val, extsel = 0, exten = STM32_EXTEN_SWTRIG;
1411 unsigned long flags;
1412 int ret;
1413
1414 if (trig) {
1415 ret = stm32_adc_get_trig_extsel(indio_dev, trig);
1416 if (ret < 0)
1417 return ret;
1418
1419 /* set trigger source and polarity (default to rising edge) */
1420 extsel = ret;
1421 exten = adc->trigger_polarity + STM32_EXTEN_HWTRIG_RISING_EDGE;
1422 }
1423
1424 spin_lock_irqsave(&adc->lock, flags);
1425 val = stm32_adc_readl(adc, adc->cfg->regs->exten.reg);
1426 val &= ~(adc->cfg->regs->exten.mask | adc->cfg->regs->extsel.mask);
1427 val |= exten << adc->cfg->regs->exten.shift;
1428 val |= extsel << adc->cfg->regs->extsel.shift;
1429 stm32_adc_writel(adc, adc->cfg->regs->exten.reg, val);
1430 spin_unlock_irqrestore(&adc->lock, flags);
1431
1432 return 0;
1433 }
1434
1435 static int stm32_adc_set_trig_pol(struct iio_dev *indio_dev,
1436 const struct iio_chan_spec *chan,
1437 unsigned int type)
1438 {
1439 struct stm32_adc *adc = iio_priv(indio_dev);
1440
1441 adc->trigger_polarity = type;
1442
1443 return 0;
1444 }
1445
1446 static int stm32_adc_get_trig_pol(struct iio_dev *indio_dev,
1447 const struct iio_chan_spec *chan)
1448 {
1449 struct stm32_adc *adc = iio_priv(indio_dev);
1450
1451 return adc->trigger_polarity;
1452 }
1453
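/* Items are ordered so that index + STM32_EXTEN_HWTRIG_RISING_EDGE gives the exten value */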
1454 static const char * const stm32_trig_pol_items[] = {
1455 "rising-edge", "falling-edge", "both-edges",
1456 };
1457
1458 static const struct iio_enum stm32_adc_trig_pol = {
1459 .items = stm32_trig_pol_items,
1460 .num_items = ARRAY_SIZE(stm32_trig_pol_items),
1461 .get = stm32_adc_get_trig_pol,
1462 .set = stm32_adc_set_trig_pol,
1463 };
1464
1465 /**
1466 * stm32_adc_single_conv() - Performs a single conversion
1467 * @indio_dev: IIO device
1468 * @chan: IIO channel
1469 * @res: conversion result
1470 *
1471 * The function performs a single conversion on a given channel:
1472 * - Apply sampling time settings
1473 * - Program sequencer with one channel (e.g. in SQ1 with len = 1)
1474 * - Use SW trigger
1475 * - Start conversion, then wait for interrupt completion.
1476 */
1477 static int stm32_adc_single_conv(struct iio_dev *indio_dev,
1478 const struct iio_chan_spec *chan,
1479 int *res)
1480 {
1481 struct stm32_adc *adc = iio_priv(indio_dev);
1482 struct device *dev = indio_dev->dev.parent;
1483 const struct stm32_adc_regspec *regs = adc->cfg->regs;
1484 long time_left;
1485 u32 val;
1486 int ret;
1487
1488 reinit_completion(&adc->completion);
1489
1490 adc->bufi = 0;
1491
1492 ret = pm_runtime_resume_and_get(dev);
1493 if (ret < 0)
1494 return ret;
1495
1496 /* Apply sampling time settings */
1497 stm32_adc_writel(adc, regs->smpr[0], adc->smpr_val[0]);
1498 stm32_adc_writel(adc, regs->smpr[1], adc->smpr_val[1]);
1499
1500 /* Program chan number in regular sequence (SQ1) */
1501 val = stm32_adc_readl(adc, regs->sqr[1].reg);
1502 val &= ~regs->sqr[1].mask;
1503 val |= chan->channel << regs->sqr[1].shift;
1504 stm32_adc_writel(adc, regs->sqr[1].reg, val);
1505
1506 /* Set regular sequence len (0 for 1 conversion) */
1507 stm32_adc_clr_bits(adc, regs->sqr[0].reg, regs->sqr[0].mask);
1508
1509 /* Trigger detection disabled (conversion can be launched in SW) */
1510 stm32_adc_clr_bits(adc, regs->exten.reg, regs->exten.mask);
1511
1512 stm32_adc_conv_irq_enable(adc);
1513
1514 adc->cfg->start_conv(indio_dev, false);
1515
1516 time_left = wait_for_completion_interruptible_timeout(
1517 &adc->completion, STM32_ADC_TIMEOUT);
1518 if (time_left == 0) {
1519 ret = -ETIMEDOUT;
1520 } else if (time_left < 0) {
1521 ret = time_left;
1522 } else {
1523 *res = adc->buffer[0];
1524 ret = IIO_VAL_INT;
1525 }
1526
1527 adc->cfg->stop_conv(indio_dev);
1528
1529 stm32_adc_conv_irq_disable(adc);
1530
1531 pm_runtime_mark_last_busy(dev);
1532 pm_runtime_put_autosuspend(dev);
1533
1534 return ret;
1535 }
1536
1537 static int stm32_adc_write_raw(struct iio_dev *indio_dev,
1538 struct iio_chan_spec const *chan,
1539 int val, int val2, long mask)
1540 {
1541 struct stm32_adc *adc = iio_priv(indio_dev);
1542 struct device *dev = indio_dev->dev.parent;
1543 int nb = adc->cfg->adc_info->num_ovs;
1544 unsigned int idx;
1545 int ret;
1546
1547 switch (mask) {
1548 case IIO_CHAN_INFO_OVERSAMPLING_RATIO:
1549 if (val2)
1550 return -EINVAL;
1551
1552 for (idx = 0; idx < nb; idx++)
1553 if (adc->cfg->adc_info->oversampling[idx] == val)
1554 break;
1555 if (idx >= nb)
1556 return -EINVAL;
1557
1558 if (!iio_device_claim_direct(indio_dev))
1559 return -EBUSY;
1560
1561 ret = pm_runtime_resume_and_get(dev);
1562 if (ret < 0)
1563 goto err;
1564
1565 adc->cfg->set_ovs(indio_dev, idx);
1566
1567 pm_runtime_mark_last_busy(dev);
1568 pm_runtime_put_autosuspend(dev);
1569
1570 adc->ovs_idx = idx;
1571
1572 err:
1573 iio_device_release_direct(indio_dev);
1574
1575 return ret;
1576 default:
1577 return -EINVAL;
1578 }
1579 }
1580
1581 static int stm32_adc_read_avail(struct iio_dev *indio_dev,
1582 struct iio_chan_spec const *chan,
1583 const int **vals, int *type, int *length, long m)
1584 {
1585 struct stm32_adc *adc = iio_priv(indio_dev);
1586
1587 switch (m) {
1588 case IIO_CHAN_INFO_OVERSAMPLING_RATIO:
1589 *type = IIO_VAL_INT;
1590 *length = adc->cfg->adc_info->num_ovs;
1591 *vals = adc->cfg->adc_info->oversampling;
1592 return IIO_AVAIL_LIST;
1593 default:
1594 return -EINVAL;
1595 }
1596 }
1597
1598 static int stm32_adc_read_raw(struct iio_dev *indio_dev,
1599 struct iio_chan_spec const *chan,
1600 int *val, int *val2, long mask)
1601 {
1602 struct stm32_adc *adc = iio_priv(indio_dev);
1603 int ret;
1604
1605 switch (mask) {
1606 case IIO_CHAN_INFO_RAW:
1607 case IIO_CHAN_INFO_PROCESSED:
1608 if (!iio_device_claim_direct(indio_dev))
1609 return -EBUSY;
1610 if (chan->type == IIO_VOLTAGE)
1611 ret = stm32_adc_single_conv(indio_dev, chan, val);
1612 else
1613 ret = -EINVAL;
1614
1615 if (mask == IIO_CHAN_INFO_PROCESSED)
1616 *val = STM32_ADC_VREFINT_VOLTAGE * adc->vrefint.vrefint_cal / *val;
1617
1618 iio_device_release_direct(indio_dev);
1619 return ret;
1620
1621 case IIO_CHAN_INFO_SCALE:
1622 if (chan->differential) {
1623 *val = adc->common->vref_mv * 2;
1624 *val2 = chan->scan_type.realbits;
1625 } else {
1626 *val = adc->common->vref_mv;
1627 *val2 = chan->scan_type.realbits;
1628 }
1629 return IIO_VAL_FRACTIONAL_LOG2;
1630
1631 case IIO_CHAN_INFO_OFFSET:
1632 if (chan->differential)
1633 /* ADC_full_scale / 2 */
1634 *val = -((1 << chan->scan_type.realbits) / 2);
1635 else
1636 *val = 0;
1637 return IIO_VAL_INT;
1638
1639 case IIO_CHAN_INFO_OVERSAMPLING_RATIO:
1640 *val = adc->cfg->adc_info->oversampling[adc->ovs_idx];
1641 return IIO_VAL_INT;
1642
1643 default:
1644 return -EINVAL;
1645 }
1646 }
1647
1648 static void stm32_adc_irq_clear(struct iio_dev *indio_dev, u32 msk)
1649 {
1650 struct stm32_adc *adc = iio_priv(indio_dev);
1651
1652 adc->cfg->irq_clear(indio_dev, msk);
1653 }
1654
1655 static irqreturn_t stm32_adc_threaded_isr(int irq, void *data)
1656 {
1657 struct iio_dev *indio_dev = data;
1658 struct stm32_adc *adc = iio_priv(indio_dev);
1659 const struct stm32_adc_regspec *regs = adc->cfg->regs;
1660 u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg);
1661
1662 /* Check ovr status right now, as ovr mask should be already disabled */
1663 if (status & regs->isr_ovr.mask) {
1664 /*
1665 * Clear ovr bit to avoid subsequent calls to IRQ handler.
1666 * This requires stopping the ADC first. The OVR bit state in ISR
1667 * is propagated to the CSR register by hardware.
1668 */
1669 adc->cfg->stop_conv(indio_dev);
1670 stm32_adc_irq_clear(indio_dev, regs->isr_ovr.mask);
1671 dev_err(&indio_dev->dev, "Overrun, stopping: restart needed\n");
1672 return IRQ_HANDLED;
1673 }
1674
1675 return IRQ_NONE;
1676 }
1677
1678 static irqreturn_t stm32_adc_isr(int irq, void *data)
1679 {
1680 struct iio_dev *indio_dev = data;
1681 struct stm32_adc *adc = iio_priv(indio_dev);
1682 const struct stm32_adc_regspec *regs = adc->cfg->regs;
1683 u32 status = stm32_adc_readl(adc, regs->isr_eoc.reg);
1684
1685 if (status & regs->isr_ovr.mask) {
1686 /*
1687 * Overrun occurred on regular conversions: data for the wrong
1688 * channel may be read. Unconditionally disable interrupts to stop
1689 * processing data, and print an error message.
1690 * Restarting the capture can be done by disabling, then
1691 * re-enabling it (e.g. write 0, then 1 to buffer/enable).
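 * From user space this is typically (sysfs path illustrative; may be
 * buffer0/enable depending on kernel version):
 *   echo 0 > /sys/bus/iio/devices/iio:deviceX/buffer/enable
 *   echo 1 > /sys/bus/iio/devices/iio:deviceX/buffer/enable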
1692 */
1693 stm32_adc_ovr_irq_disable(adc);
1694 stm32_adc_conv_irq_disable(adc);
1695 return IRQ_WAKE_THREAD;
1696 }
1697
1698 if (status & regs->isr_eoc.mask) {
1699 /* Reading DR also clears EOC status flag */
1700 adc->buffer[adc->bufi] = stm32_adc_readw(adc, regs->dr);
1701 if (iio_buffer_enabled(indio_dev)) {
1702 adc->bufi++;
1703 if (adc->bufi >= adc->num_conv) {
1704 stm32_adc_conv_irq_disable(adc);
1705 iio_trigger_poll(indio_dev->trig);
1706 }
1707 } else {
1708 complete(&adc->completion);
1709 }
1710 return IRQ_HANDLED;
1711 }
1712
1713 return IRQ_NONE;
1714 }
1715
1716 /**
1717 * stm32_adc_validate_trigger() - validate trigger for stm32 adc
1718 * @indio_dev: IIO device
1719 * @trig: new trigger
1720 *
1721 * Returns: 0 if trig matches one of the triggers registered by stm32 adc
1722 * driver, -EINVAL otherwise.
1723 */
1724 static int stm32_adc_validate_trigger(struct iio_dev *indio_dev,
1725 struct iio_trigger *trig)
1726 {
1727 return stm32_adc_get_trig_extsel(indio_dev, trig) < 0 ? -EINVAL : 0;
1728 }
1729
1730 static int stm32_adc_set_watermark(struct iio_dev *indio_dev, unsigned int val)
1731 {
1732 struct stm32_adc *adc = iio_priv(indio_dev);
1733 unsigned int watermark = STM32_DMA_BUFFER_SIZE / 2;
1734 unsigned int rx_buf_sz = STM32_DMA_BUFFER_SIZE;
1735
1736 /*
1737 * DMA cyclic transfers are used; the buffer is split into two periods.
1738 * There should always be:
1739 * - one buffer (period) the DMA engine is working on
1740 * - one buffer (period) the driver can push data to.
1741 */
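/*
 * Worked example (hypothetical figures, assuming PAGE_SIZE = 4096): with
 * val = 512 samples and num_conv = 2, watermark becomes
 * min(2048, 512 * 2) = 1024 bytes and rx_buf_sz becomes
 * min(4096, 1024 * 2 * 2) = 4096 bytes, i.e. two 2 KiB periods.
 */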
1742 watermark = min(watermark, val * (unsigned)(sizeof(u16)));
1743 adc->rx_buf_sz = min(rx_buf_sz, watermark * 2 * adc->num_conv);
1744
1745 return 0;
1746 }
1747
1748 static int stm32_adc_update_scan_mode(struct iio_dev *indio_dev,
1749 const unsigned long *scan_mask)
1750 {
1751 struct stm32_adc *adc = iio_priv(indio_dev);
1752 struct device *dev = indio_dev->dev.parent;
1753 int ret;
1754
1755 ret = pm_runtime_resume_and_get(dev);
1756 if (ret < 0)
1757 return ret;
1758
1759 adc->num_conv = bitmap_weight(scan_mask, iio_get_masklength(indio_dev));
1760
1761 ret = stm32_adc_conf_scan_seq(indio_dev, scan_mask);
1762 pm_runtime_mark_last_busy(dev);
1763 pm_runtime_put_autosuspend(dev);
1764
1765 return ret;
1766 }
1767
1768 static int stm32_adc_fwnode_xlate(struct iio_dev *indio_dev,
1769 const struct fwnode_reference_args *iiospec)
1770 {
1771 int i;
1772
1773 for (i = 0; i < indio_dev->num_channels; i++)
1774 if (indio_dev->channels[i].channel == iiospec->args[0])
1775 return i;
1776
1777 return -EINVAL;
1778 }
1779
1780 /**
1781 * stm32_adc_debugfs_reg_access - read or write register value
1782 * @indio_dev: IIO device structure
1783 * @reg: register offset
1784 * @writeval: value to write
1785 * @readval: value to read
1786 *
1787 * To read a value from an ADC register:
1788 * echo [ADC reg offset] > direct_reg_access
1789 * cat direct_reg_access
1790 *
1791 * To write a value to an ADC register:
1792 * echo [ADC_reg_offset] [value] > direct_reg_access
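 *
 * Example session (the 0x08 offset is illustrative only, check the
 * reference manual for the actual register map):
 *   echo 0x08 > direct_reg_access
 *   cat direct_reg_access
 *   echo 0x08 0x0 > direct_reg_access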
1793 */
1794 static int stm32_adc_debugfs_reg_access(struct iio_dev *indio_dev,
1795 unsigned reg, unsigned writeval,
1796 unsigned *readval)
1797 {
1798 struct stm32_adc *adc = iio_priv(indio_dev);
1799 struct device *dev = indio_dev->dev.parent;
1800 int ret;
1801
1802 ret = pm_runtime_resume_and_get(dev);
1803 if (ret < 0)
1804 return ret;
1805
1806 if (!readval)
1807 stm32_adc_writel(adc, reg, writeval);
1808 else
1809 *readval = stm32_adc_readl(adc, reg);
1810
1811 pm_runtime_mark_last_busy(dev);
1812 pm_runtime_put_autosuspend(dev);
1813
1814 return 0;
1815 }
1816
1817 static const struct iio_info stm32_adc_iio_info = {
1818 .read_raw = stm32_adc_read_raw,
1819 .write_raw = stm32_adc_write_raw,
1820 .read_avail = stm32_adc_read_avail,
1821 .validate_trigger = stm32_adc_validate_trigger,
1822 .hwfifo_set_watermark = stm32_adc_set_watermark,
1823 .update_scan_mode = stm32_adc_update_scan_mode,
1824 .debugfs_reg_access = stm32_adc_debugfs_reg_access,
1825 .fwnode_xlate = stm32_adc_fwnode_xlate,
1826 };
1827
1828 static unsigned int stm32_adc_dma_residue(struct stm32_adc *adc)
1829 {
1830 struct dma_tx_state state;
1831 enum dma_status status;
1832
1833 status = dmaengine_tx_status(adc->dma_chan,
1834 adc->dma_chan->cookie,
1835 &state);
1836 if (status == DMA_IN_PROGRESS) {
1837 /* Residue is size in bytes from end of buffer */
1838 unsigned int i = adc->rx_buf_sz - state.residue;
1839 unsigned int size;
1840
1841 /* Return available bytes */
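/*
 * e.g. (illustrative numbers): with rx_buf_sz = 4096, residue = 1024
 * (so i = 3072) and bufi = 3584, the DMA write pointer has wrapped and
 * 4096 + 3072 - 3584 = 3584 bytes are available.
 */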
1842 if (i >= adc->bufi)
1843 size = i - adc->bufi;
1844 else
1845 size = adc->rx_buf_sz + i - adc->bufi;
1846
1847 return size;
1848 }
1849
1850 return 0;
1851 }
1852
1853 static void stm32_adc_dma_buffer_done(void *data)
1854 {
1855 struct iio_dev *indio_dev = data;
1856 struct stm32_adc *adc = iio_priv(indio_dev);
1857 int residue = stm32_adc_dma_residue(adc);
1858
1859 /*
1860 * In DMA mode the trigger services of IIO are not used
1861 * (e.g. no call to iio_trigger_poll).
1862 * Calling the IRQ handler associated with the hardware trigger is
1863 * not relevant here, as the conversions have already been done. Data
1864 * transfers are performed directly in the DMA callback instead.
1865 * This avoids calling the trigger IRQ handler, which may sleep, from
1866 * an atomic context (the DMA IRQ handler context).
1867 */
1868 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi);
1869
1870 while (residue >= indio_dev->scan_bytes) {
1871 u16 *buffer = (u16 *)&adc->rx_buf[adc->bufi];
1872
1873 iio_push_to_buffers(indio_dev, buffer);
1874
1875 residue -= indio_dev->scan_bytes;
1876 adc->bufi += indio_dev->scan_bytes;
1877 if (adc->bufi >= adc->rx_buf_sz)
1878 adc->bufi = 0;
1879 }
1880 }
1881
1882 static int stm32_adc_dma_start(struct iio_dev *indio_dev)
1883 {
1884 struct stm32_adc *adc = iio_priv(indio_dev);
1885 struct dma_async_tx_descriptor *desc;
1886 dma_cookie_t cookie;
1887 int ret;
1888
1889 if (!adc->dma_chan)
1890 return 0;
1891
1892 dev_dbg(&indio_dev->dev, "%s size=%d watermark=%d\n", __func__,
1893 adc->rx_buf_sz, adc->rx_buf_sz / 2);
1894
1895 /* Prepare a DMA cyclic transaction */
1896 desc = dmaengine_prep_dma_cyclic(adc->dma_chan,
1897 adc->rx_dma_buf,
1898 adc->rx_buf_sz, adc->rx_buf_sz / 2,
1899 DMA_DEV_TO_MEM,
1900 DMA_PREP_INTERRUPT);
1901 if (!desc)
1902 return -EBUSY;
1903
1904 desc->callback = stm32_adc_dma_buffer_done;
1905 desc->callback_param = indio_dev;
1906
1907 cookie = dmaengine_submit(desc);
1908 ret = dma_submit_error(cookie);
1909 if (ret) {
1910 dmaengine_terminate_sync(adc->dma_chan);
1911 return ret;
1912 }
1913
1914 /* Issue pending DMA requests */
1915 dma_async_issue_pending(adc->dma_chan);
1916
1917 return 0;
1918 }
1919
1920 static int stm32_adc_buffer_postenable(struct iio_dev *indio_dev)
1921 {
1922 struct stm32_adc *adc = iio_priv(indio_dev);
1923 struct device *dev = indio_dev->dev.parent;
1924 int ret;
1925
1926 ret = pm_runtime_resume_and_get(dev);
1927 if (ret < 0)
1928 return ret;
1929
1930 ret = stm32_adc_set_trig(indio_dev, indio_dev->trig);
1931 if (ret) {
1932 dev_err(&indio_dev->dev, "Can't set trigger\n");
1933 goto err_pm_put;
1934 }
1935
1936 ret = stm32_adc_dma_start(indio_dev);
1937 if (ret) {
1938 dev_err(&indio_dev->dev, "Can't start dma\n");
1939 goto err_clr_trig;
1940 }
1941
1942 /* Reset adc buffer index */
1943 adc->bufi = 0;
1944
1945 stm32_adc_ovr_irq_enable(adc);
1946
1947 if (!adc->dma_chan)
1948 stm32_adc_conv_irq_enable(adc);
1949
1950 adc->cfg->start_conv(indio_dev, !!adc->dma_chan);
1951
1952 return 0;
1953
1954 err_clr_trig:
1955 stm32_adc_set_trig(indio_dev, NULL);
1956 err_pm_put:
1957 pm_runtime_mark_last_busy(dev);
1958 pm_runtime_put_autosuspend(dev);
1959
1960 return ret;
1961 }
1962
1963 static int stm32_adc_buffer_predisable(struct iio_dev *indio_dev)
1964 {
1965 struct stm32_adc *adc = iio_priv(indio_dev);
1966 struct device *dev = indio_dev->dev.parent;
1967
1968 adc->cfg->stop_conv(indio_dev);
1969 if (!adc->dma_chan)
1970 stm32_adc_conv_irq_disable(adc);
1971
1972 stm32_adc_ovr_irq_disable(adc);
1973
1974 if (adc->dma_chan)
1975 dmaengine_terminate_sync(adc->dma_chan);
1976
1977 if (stm32_adc_set_trig(indio_dev, NULL))
1978 dev_err(&indio_dev->dev, "Can't clear trigger\n");
1979
1980 pm_runtime_mark_last_busy(dev);
1981 pm_runtime_put_autosuspend(dev);
1982
1983 return 0;
1984 }
1985
1986 static const struct iio_buffer_setup_ops stm32_adc_buffer_setup_ops = {
1987 .postenable = &stm32_adc_buffer_postenable,
1988 .predisable = &stm32_adc_buffer_predisable,
1989 };
1990
1991 static irqreturn_t stm32_adc_trigger_handler(int irq, void *p)
1992 {
1993 struct iio_poll_func *pf = p;
1994 struct iio_dev *indio_dev = pf->indio_dev;
1995 struct stm32_adc *adc = iio_priv(indio_dev);
1996
1997 dev_dbg(&indio_dev->dev, "%s bufi=%d\n", __func__, adc->bufi);
1998
1999 /* reset buffer index */
2000 adc->bufi = 0;
2001 iio_push_to_buffers_with_ts(indio_dev, adc->buffer, sizeof(adc->buffer),
2002 pf->timestamp);
2003 iio_trigger_notify_done(indio_dev->trig);
2004
2005 /* re-enable eoc irq */
2006 stm32_adc_conv_irq_enable(adc);
2007
2008 return IRQ_HANDLED;
2009 }
2010
2011 static const struct iio_chan_spec_ext_info stm32_adc_ext_info[] = {
2012 IIO_ENUM("trigger_polarity", IIO_SHARED_BY_ALL, &stm32_adc_trig_pol),
2013 {
2014 .name = "trigger_polarity_available",
2015 .shared = IIO_SHARED_BY_ALL,
2016 .read = iio_enum_available_read,
2017 .private = (uintptr_t)&stm32_adc_trig_pol,
2018 },
2019 { }
2020 };
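
/*
 * Typical user-space usage of the extended attributes above (device index,
 * sysfs path and polarity value are illustrative, see
 * trigger_polarity_available for the supported values):
 *   cat /sys/bus/iio/devices/iio:deviceX/trigger_polarity_available
 *   echo rising-edge > /sys/bus/iio/devices/iio:deviceX/trigger_polarity
 */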
2021
2022 static void stm32_adc_debugfs_init(struct iio_dev *indio_dev)
2023 {
2024 struct stm32_adc *adc = iio_priv(indio_dev);
2025 struct dentry *d = iio_get_debugfs_dentry(indio_dev);
2026 struct stm32_adc_calib *cal = &adc->cal;
2027 char buf[16];
2028 unsigned int i;
2029
2030 if (!adc->cfg->has_linearcal)
2031 return;
2032
2033 for (i = 0; i < STM32H7_LINCALFACT_NUM; i++) {
2034 snprintf(buf, sizeof(buf), "lincalfact%d", i + 1);
2035 debugfs_create_u32(buf, 0444, d, &cal->lincalfact[i]);
2036 }
2037 }
2038
2039 static int stm32_adc_fw_get_resolution(struct iio_dev *indio_dev)
2040 {
2041 struct device *dev = &indio_dev->dev;
2042 struct stm32_adc *adc = iio_priv(indio_dev);
2043 unsigned int i;
2044 u32 res;
2045
2046 if (device_property_read_u32(dev, "assigned-resolution-bits", &res))
2047 res = adc->cfg->adc_info->resolutions[0];
2048
2049 for (i = 0; i < adc->cfg->adc_info->num_res; i++)
2050 if (res == adc->cfg->adc_info->resolutions[i])
2051 break;
2052 if (i >= adc->cfg->adc_info->num_res) {
2053 dev_err(&indio_dev->dev, "Bad resolution: %u bits\n", res);
2054 return -EINVAL;
2055 }
2056
2057 dev_dbg(&indio_dev->dev, "Using %u bits resolution\n", res);
2058 adc->res = i;
2059
2060 return 0;
2061 }
2062
2063 static void stm32_adc_smpr_init(struct stm32_adc *adc, int channel, u32 smp_ns)
2064 {
2065 const struct stm32_adc_regs *smpr = &adc->cfg->regs->smp_bits[channel];
2066 u32 period_ns, shift = smpr->shift, mask = smpr->mask;
2067 unsigned int i, smp, r = smpr->reg;
2068
2069 /*
2070 * For internal channels, ensure that the sampling time cannot
2071 * be lower than the one specified in the datasheet
2072 */
2073 for (i = 0; i < STM32_ADC_INT_CH_NB; i++)
2074 if (channel == adc->int_ch[i] && adc->int_ch[i] != STM32_ADC_INT_CH_NONE)
2075 smp_ns = max(smp_ns, adc->cfg->ts_int_ch[i]);
2076
2077 /* Determine sampling time (ADC clock cycles) */
2078 period_ns = NSEC_PER_SEC / adc->common->rate;
2079 for (smp = 0; smp <= STM32_ADC_MAX_SMP; smp++)
2080 if ((period_ns * adc->cfg->smp_cycles[smp]) >= smp_ns)
2081 break;
2082 if (smp > STM32_ADC_MAX_SMP)
2083 smp = STM32_ADC_MAX_SMP;
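/*
 * Worked example (made-up figures): with a 36 MHz ADC clock, period_ns is
 * ~27 ns, so a requested smp_ns of 400 ns selects the first smp index whose
 * cycle count satisfies 27 * smp_cycles[smp] >= 400, or the maximum
 * sampling time if none does.
 */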
2084
2085 /* pre-build sampling time registers (e.g. smpr1, smpr2) */
2086 adc->smpr_val[r] = (adc->smpr_val[r] & ~mask) | (smp << shift);
2087 }
2088
2089 static void stm32_adc_chan_init_one(struct iio_dev *indio_dev,
2090 struct iio_chan_spec *chan, u32 vinp,
2091 u32 vinn, int scan_index, bool differential)
2092 {
2093 struct stm32_adc *adc = iio_priv(indio_dev);
2094 char *name = adc->chan_name[vinp];
2095
2096 chan->type = IIO_VOLTAGE;
2097 chan->channel = vinp;
2098 if (differential) {
2099 chan->differential = 1;
2100 chan->channel2 = vinn;
2101 snprintf(name, STM32_ADC_CH_SZ, "in%d-in%d", vinp, vinn);
2102 } else {
2103 snprintf(name, STM32_ADC_CH_SZ, "in%d", vinp);
2104 }
2105 chan->datasheet_name = name;
2106 chan->scan_index = scan_index;
2107 chan->indexed = 1;
2108 if (chan->channel == adc->int_ch[STM32_ADC_INT_CH_VREFINT])
2109 chan->info_mask_separate = BIT(IIO_CHAN_INFO_PROCESSED);
2110 else
2111 chan->info_mask_separate = BIT(IIO_CHAN_INFO_RAW);
2112 chan->info_mask_shared_by_type = BIT(IIO_CHAN_INFO_SCALE) |
2113 BIT(IIO_CHAN_INFO_OFFSET);
2114 if (adc->cfg->has_oversampling) {
2115 chan->info_mask_shared_by_all |= BIT(IIO_CHAN_INFO_OVERSAMPLING_RATIO);
2116 chan->info_mask_shared_by_all_available = BIT(IIO_CHAN_INFO_OVERSAMPLING_RATIO);
2117 }
2118 chan->scan_type.sign = 'u';
2119 chan->scan_type.realbits = adc->cfg->adc_info->resolutions[adc->res];
2120 chan->scan_type.storagebits = 16;
2121 chan->ext_info = stm32_adc_ext_info;
2122
2123 /* pre-build selected channels mask */
2124 adc->pcsel |= BIT(chan->channel);
2125 if (differential) {
2126 /* pre-build diff channels mask */
2127 adc->difsel |= BIT(chan->channel) & adc->cfg->regs->difsel.mask;
2128 /* Also add negative input to pre-selected channels */
2129 adc->pcsel |= BIT(chan->channel2);
2130 }
2131 }
2132
2133 static int stm32_adc_get_legacy_chan_count(struct iio_dev *indio_dev, struct stm32_adc *adc)
2134 {
2135 struct device *dev = &indio_dev->dev;
2136 const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
2137 int num_channels = 0, ret;
2138
2139 dev_dbg(&indio_dev->dev, "using legacy channel config\n");
2140
2141 ret = device_property_count_u32(dev, "st,adc-channels");
2142 if (ret > adc_info->max_channels) {
2143 dev_err(&indio_dev->dev, "Bad st,adc-channels?\n");
2144 return -EINVAL;
2145 } else if (ret > 0) {
2146 num_channels += ret;
2147 }
2148
2149 /*
2150 * Each st,adc-diff-channels entry is a group of 2 u32s, so divide @ret
2151 * to get the *real* number of differential channels.
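 * e.g. an illustrative st,adc-diff-channels = <1 5>, <2 6>; yields
 * ret = 4 u32 values, i.e. 2 differential channels.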
2152 */
2153 ret = device_property_count_u32(dev, "st,adc-diff-channels");
2154 if (ret > 0) {
2155 ret /= (int)(sizeof(struct stm32_adc_diff_channel) / sizeof(u32));
2156 if (ret > adc_info->max_channels) {
2157 dev_err(&indio_dev->dev, "Bad st,adc-diff-channels?\n");
2158 return -EINVAL;
2159 } else if (ret > 0) {
2160 adc->num_diff = ret;
2161 num_channels += ret;
2162 }
2163 }
2164
2165 /* Optional sample time is provided either per channel, or once for all channels */
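/* e.g. st,min-sample-time-nsecs = <10000>; (a single shared value, illustrative) */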
2166 adc->nsmps = device_property_count_u32(dev, "st,min-sample-time-nsecs");
2167 if (adc->nsmps > 1 && adc->nsmps != num_channels) {
2168 dev_err(&indio_dev->dev, "Invalid st,min-sample-time-nsecs\n");
2169 return -EINVAL;
2170 }
2171
2172 return num_channels;
2173 }
2174
2175 static int stm32_adc_legacy_chan_init(struct iio_dev *indio_dev,
2176 struct stm32_adc *adc,
2177 struct iio_chan_spec *channels,
2178 int nchans)
2179 {
2180 const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
2181 struct stm32_adc_diff_channel diff[STM32_ADC_CH_MAX];
2182 struct device *dev = &indio_dev->dev;
2183 u32 num_diff = adc->num_diff;
2184 int num_se = nchans - num_diff;
2185 int size = num_diff * sizeof(*diff) / sizeof(u32);
2186 int scan_index = 0, ret, i, c;
2187 u32 smp = 0, smps[STM32_ADC_CH_MAX], chans[STM32_ADC_CH_MAX];
2188
2189 if (num_diff) {
2190 ret = device_property_read_u32_array(dev, "st,adc-diff-channels",
2191 (u32 *)diff, size);
2192 if (ret) {
2193 dev_err(&indio_dev->dev, "Failed to get diff channels %d\n", ret);
2194 return ret;
2195 }
2196
2197 for (i = 0; i < num_diff; i++) {
2198 if (diff[i].vinp >= adc_info->max_channels ||
2199 diff[i].vinn >= adc_info->max_channels) {
2200 dev_err(&indio_dev->dev, "Invalid channel in%d-in%d\n",
2201 diff[i].vinp, diff[i].vinn);
2202 return -EINVAL;
2203 }
2204
2205 stm32_adc_chan_init_one(indio_dev, &channels[scan_index],
2206 diff[i].vinp, diff[i].vinn,
2207 scan_index, true);
2208 scan_index++;
2209 }
2210 }
2211 if (num_se > 0) {
2212 ret = device_property_read_u32_array(dev, "st,adc-channels", chans, num_se);
2213 if (ret) {
2214 dev_err(&indio_dev->dev, "Failed to get st,adc-channels %d\n", ret);
2215 return ret;
2216 }
2217
2218 for (c = 0; c < num_se; c++) {
2219 if (chans[c] >= adc_info->max_channels) {
2220 dev_err(&indio_dev->dev, "Invalid channel %d\n",
2221 chans[c]);
2222 return -EINVAL;
2223 }
2224
2225 /* Channel can't be configured both as single-ended & diff */
2226 for (i = 0; i < num_diff; i++) {
2227 if (chans[c] == diff[i].vinp) {
2228 dev_err(&indio_dev->dev, "channel %d misconfigured\n",
2229 chans[c]);
2230 return -EINVAL;
2231 }
2232 }
2233 stm32_adc_chan_init_one(indio_dev, &channels[scan_index],
2234 chans[c], 0, scan_index, false);
2235 scan_index++;
2236 }
2237 }
2238
2239 if (adc->nsmps > 0) {
2240 ret = device_property_read_u32_array(dev, "st,min-sample-time-nsecs",
2241 smps, adc->nsmps);
2242 if (ret)
2243 return ret;
2244 }
2245
2246 for (i = 0; i < scan_index; i++) {
2247 /*
2248 * This check works with the above logic so that the smp value is
2249 * only modified when a valid u32 value can be decoded. This allows
2250 * getting either no value, one shared value for all indexes, or one
2251 * value per channel. The point is to keep the same behavior as
2252 * 'of_property_read_u32_index()'.
2253 */
2254 if (i < adc->nsmps)
2255 smp = smps[i];
2256
2257 /* Prepare sampling time settings */
2258 stm32_adc_smpr_init(adc, channels[i].channel, smp);
2259 }
2260
2261 return scan_index;
2262 }
2263
2264 static int stm32_adc_populate_int_ch(struct iio_dev *indio_dev, const char *ch_name,
2265 int chan)
2266 {
2267 struct stm32_adc *adc = iio_priv(indio_dev);
2268 u16 vrefint;
2269 int i, ret;
2270
2271 for (i = 0; i < STM32_ADC_INT_CH_NB; i++) {
2272 if (!strncmp(stm32_adc_ic[i].name, ch_name, STM32_ADC_CH_SZ)) {
2273 /* Check internal channel availability */
2274 switch (i) {
2275 case STM32_ADC_INT_CH_VDDCORE:
2276 if (!adc->cfg->regs->or_vddcore.reg)
2277 dev_warn(&indio_dev->dev,
2278 "%s channel not available\n", ch_name);
2279 break;
2280 case STM32_ADC_INT_CH_VDDCPU:
2281 if (!adc->cfg->regs->or_vddcpu.reg)
2282 dev_warn(&indio_dev->dev,
2283 "%s channel not available\n", ch_name);
2284 break;
2285 case STM32_ADC_INT_CH_VDDQ_DDR:
2286 if (!adc->cfg->regs->or_vddq_ddr.reg)
2287 dev_warn(&indio_dev->dev,
2288 "%s channel not available\n", ch_name);
2289 break;
2290 case STM32_ADC_INT_CH_VREFINT:
2291 if (!adc->cfg->regs->ccr_vref.reg)
2292 dev_warn(&indio_dev->dev,
2293 "%s channel not available\n", ch_name);
2294 break;
2295 case STM32_ADC_INT_CH_VBAT:
2296 if (!adc->cfg->regs->ccr_vbat.reg)
2297 dev_warn(&indio_dev->dev,
2298 "%s channel not available\n", ch_name);
2299 break;
2300 }
2301
2302 if (stm32_adc_ic[i].idx != STM32_ADC_INT_CH_VREFINT) {
2303 adc->int_ch[i] = chan;
2304 break;
2305 }
2306
2307 /* Get calibration data for vrefint channel */
2308 ret = nvmem_cell_read_u16(&indio_dev->dev, "vrefint", &vrefint);
2309 if (ret && ret != -ENOENT) {
2310 return dev_err_probe(indio_dev->dev.parent, ret,
2311 "nvmem access error\n");
2312 }
2313 if (ret == -ENOENT) {
2314 dev_dbg(&indio_dev->dev, "vrefint calibration not found. Skip vrefint channel\n");
2315 return ret;
2316 } else if (!vrefint) {
2317 dev_dbg(&indio_dev->dev, "Null vrefint calibration value. Skip vrefint channel\n");
2318 return -ENOENT;
2319 }
2320 adc->int_ch[i] = chan;
2321 adc->vrefint.vrefint_cal = vrefint;
2322 }
2323 }
2324
2325 return 0;
2326 }
2327
2328 static int stm32_adc_generic_chan_init(struct iio_dev *indio_dev,
2329 struct stm32_adc *adc,
2330 struct iio_chan_spec *channels)
2331 {
2332 const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
2333 struct device *dev = &indio_dev->dev;
2334 const char *name;
2335 int val, scan_index = 0, ret;
2336 bool differential;
2337 u32 vin[2];
2338
2339 device_for_each_child_node_scoped(dev, child) {
2340 ret = fwnode_property_read_u32(child, "reg", &val);
2341 if (ret)
2342 return dev_err_probe(dev, ret,
2343 "Missing channel index\n");
2344
2345 ret = fwnode_property_read_string(child, "label", &name);
2346 /* label is optional */
2347 if (!ret) {
2348 if (strlen(name) >= STM32_ADC_CH_SZ)
2349 return dev_err_probe(dev, -EINVAL,
2350 "Label %s exceeds %d characters\n",
2351 name, STM32_ADC_CH_SZ);
2352
2353 strscpy(adc->chan_name[val], name, STM32_ADC_CH_SZ);
2354 ret = stm32_adc_populate_int_ch(indio_dev, name, val);
2355 if (ret == -ENOENT)
2356 continue;
2357 else if (ret)
2358 return ret;
2359 } else if (ret != -EINVAL) {
2360 return dev_err_probe(dev, ret, "Invalid label\n");
2361 }
2362
2363 if (val >= adc_info->max_channels)
2364 return dev_err_probe(dev, -EINVAL,
2365 "Invalid channel %d\n", val);
2366
2367 differential = false;
2368 ret = fwnode_property_read_u32_array(child, "diff-channels", vin, 2);
2369 /* diff-channels is optional */
2370 if (!ret) {
2371 differential = true;
2372 if (vin[0] != val || vin[1] >= adc_info->max_channels)
2373 return dev_err_probe(dev, -EINVAL,
2374 "Invalid channel in%d-in%d\n",
2375 vin[0], vin[1]);
2376 } else if (ret != -EINVAL) {
2377 return dev_err_probe(dev, ret,
2378 "Invalid diff-channels property\n");
2379 }
2380
2381 stm32_adc_chan_init_one(indio_dev, &channels[scan_index], val,
2382 vin[1], scan_index, differential);
2383
2384 val = 0;
2385 ret = fwnode_property_read_u32(child, "st,min-sample-time-ns", &val);
2386 /* st,min-sample-time-ns is optional */
2387 if (ret && ret != -EINVAL)
2388 return dev_err_probe(dev, ret,
2389 "Invalid st,min-sample-time-ns property\n");
2390
2391 stm32_adc_smpr_init(adc, channels[scan_index].channel, val);
2392 if (differential)
2393 stm32_adc_smpr_init(adc, vin[1], val);
2394
2395 scan_index++;
2396 }
2397
2398 return scan_index;
2399 }
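
/*
 * Illustrative channel child node handled above (property values are
 * examples only, not taken from a real board):
 *
 *	channel@1 {
 *		reg = <1>;
 *		label = "sensor_a";
 *		st,min-sample-time-ns = <5000>;
 *	};
 */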
2400
2401 static int stm32_adc_chan_fw_init(struct iio_dev *indio_dev, bool timestamping)
2402 {
2403 struct stm32_adc *adc = iio_priv(indio_dev);
2404 const struct stm32_adc_info *adc_info = adc->cfg->adc_info;
2405 struct iio_chan_spec *channels;
2406 int scan_index = 0, num_channels = 0, ret, i;
2407 bool legacy = false;
2408
2409 for (i = 0; i < STM32_ADC_INT_CH_NB; i++)
2410 adc->int_ch[i] = STM32_ADC_INT_CH_NONE;
2411
2412 num_channels = device_get_child_node_count(&indio_dev->dev);
2413 /* If no channels have been found, fall back to the legacy channel properties. */
2414 if (!num_channels) {
2415 legacy = true;
2416
2417 ret = stm32_adc_get_legacy_chan_count(indio_dev, adc);
2418 if (!ret) {
2419 dev_err(indio_dev->dev.parent, "No channel found\n");
2420 return -ENODATA;
2421 } else if (ret < 0) {
2422 return ret;
2423 }
2424
2425 num_channels = ret;
2426 }
2427
2428 if (num_channels > adc_info->max_channels) {
2429 dev_err(&indio_dev->dev, "Channel number [%d] exceeds %d\n",
2430 num_channels, adc_info->max_channels);
2431 return -EINVAL;
2432 }
2433
2434 if (timestamping)
2435 num_channels++;
2436
2437 channels = devm_kcalloc(&indio_dev->dev, num_channels,
2438 sizeof(struct iio_chan_spec), GFP_KERNEL);
2439 if (!channels)
2440 return -ENOMEM;
2441
2442 if (legacy)
2443 ret = stm32_adc_legacy_chan_init(indio_dev, adc, channels,
2444 timestamping ? num_channels - 1 : num_channels);
2445 else
2446 ret = stm32_adc_generic_chan_init(indio_dev, adc, channels);
2447 if (ret < 0)
2448 return ret;
2449 scan_index = ret;
2450
2451 if (timestamping) {
2452 struct iio_chan_spec *timestamp = &channels[scan_index];
2453
2454 timestamp->type = IIO_TIMESTAMP;
2455 timestamp->channel = -1;
2456 timestamp->scan_index = scan_index;
2457 timestamp->scan_type.sign = 's';
2458 timestamp->scan_type.realbits = 64;
2459 timestamp->scan_type.storagebits = 64;
2460
2461 scan_index++;
2462 }
2463
2464 indio_dev->num_channels = scan_index;
2465 indio_dev->channels = channels;
2466
2467 return 0;
2468 }
2469
2470 static int stm32_adc_dma_request(struct device *dev, struct iio_dev *indio_dev)
2471 {
2472 struct stm32_adc *adc = iio_priv(indio_dev);
2473 struct dma_slave_config config;
2474 int ret;
2475
2476 adc->dma_chan = dma_request_chan(dev, "rx");
2477 if (IS_ERR(adc->dma_chan)) {
2478 ret = PTR_ERR(adc->dma_chan);
2479 if (ret != -ENODEV)
2480 return dev_err_probe(dev, ret,
2481 "DMA channel request failed with\n");
2482
2483 /* DMA is optional: fall back to IRQ mode */
2484 adc->dma_chan = NULL;
2485 return 0;
2486 }
2487
2488 adc->rx_buf = dma_alloc_coherent(adc->dma_chan->device->dev,
2489 STM32_DMA_BUFFER_SIZE,
2490 &adc->rx_dma_buf, GFP_KERNEL);
2491 if (!adc->rx_buf) {
2492 ret = -ENOMEM;
2493 goto err_release;
2494 }
2495
2496 /* Configure DMA channel to read data register */
2497 memset(&config, 0, sizeof(config));
2498 config.src_addr = (dma_addr_t)adc->common->phys_base;
2499 config.src_addr += adc->offset + adc->cfg->regs->dr;
2500 config.src_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
2501
2502 ret = dmaengine_slave_config(adc->dma_chan, &config);
2503 if (ret)
2504 goto err_free;
2505
2506 return 0;
2507
2508 err_free:
2509 dma_free_coherent(adc->dma_chan->device->dev, STM32_DMA_BUFFER_SIZE,
2510 adc->rx_buf, adc->rx_dma_buf);
2511 err_release:
2512 dma_release_channel(adc->dma_chan);
2513
2514 return ret;
2515 }
2516
2517 static int stm32_adc_probe(struct platform_device *pdev)
2518 {
2519 struct iio_dev *indio_dev;
2520 struct device *dev = &pdev->dev;
2521 irqreturn_t (*handler)(int irq, void *p) = NULL;
2522 struct stm32_adc *adc;
2523 bool timestamping = false;
2524 int ret;
2525
2526 indio_dev = devm_iio_device_alloc(&pdev->dev, sizeof(*adc));
2527 if (!indio_dev)
2528 return -ENOMEM;
2529
2530 adc = iio_priv(indio_dev);
2531 adc->common = dev_get_drvdata(pdev->dev.parent);
2532 spin_lock_init(&adc->lock);
2533 init_completion(&adc->completion);
2534 adc->cfg = device_get_match_data(dev);
2535
2536 indio_dev->name = dev_name(&pdev->dev);
2537 device_set_node(&indio_dev->dev, dev_fwnode(&pdev->dev));
2538 indio_dev->info = &stm32_adc_iio_info;
2539 indio_dev->modes = INDIO_DIRECT_MODE | INDIO_HARDWARE_TRIGGERED;
2540
2541 platform_set_drvdata(pdev, indio_dev);
2542
2543 ret = device_property_read_u32(dev, "reg", &adc->offset);
2544 if (ret != 0) {
2545 dev_err(&pdev->dev, "missing reg property\n");
2546 return -EINVAL;
2547 }
2548
2549 adc->irq = platform_get_irq(pdev, 0);
2550 if (adc->irq < 0)
2551 return adc->irq;
2552
2553 ret = devm_request_threaded_irq(&pdev->dev, adc->irq, stm32_adc_isr,
2554 stm32_adc_threaded_isr,
2555 0, pdev->name, indio_dev);
2556 if (ret) {
2557 dev_err(&pdev->dev, "failed to request IRQ\n");
2558 return ret;
2559 }
2560
2561 adc->clk = devm_clk_get(&pdev->dev, NULL);
2562 if (IS_ERR(adc->clk)) {
2563 ret = PTR_ERR(adc->clk);
2564 if (ret == -ENOENT && !adc->cfg->clk_required) {
2565 adc->clk = NULL;
2566 } else {
2567 dev_err(&pdev->dev, "Can't get clock\n");
2568 return ret;
2569 }
2570 }
2571
2572 ret = stm32_adc_fw_get_resolution(indio_dev);
2573 if (ret < 0)
2574 return ret;
2575
2576 ret = stm32_adc_dma_request(dev, indio_dev);
2577 if (ret < 0)
2578 return ret;
2579
2580 if (!adc->dma_chan) {
2581 /* For PIO mode only, iio_pollfunc_store_time stores a timestamp
2582 * in the primary trigger IRQ handler and stm32_adc_trigger_handler
2583 * runs in the IRQ thread to push out the buffer along with the timestamp.
2584 */
2585 handler = &stm32_adc_trigger_handler;
2586 timestamping = true;
2587 }
2588
2589 ret = stm32_adc_chan_fw_init(indio_dev, timestamping);
2590 if (ret < 0)
2591 goto err_dma_disable;
2592
2593 ret = iio_triggered_buffer_setup(indio_dev,
2594 &iio_pollfunc_store_time, handler,
2595 &stm32_adc_buffer_setup_ops);
2596 if (ret) {
2597 dev_err(&pdev->dev, "buffer setup failed\n");
2598 goto err_dma_disable;
2599 }
2600
2601 /* Get stm32-adc-core PM online */
2602 pm_runtime_get_noresume(dev);
2603 pm_runtime_set_active(dev);
2604 pm_runtime_set_autosuspend_delay(dev, STM32_ADC_HW_STOP_DELAY_MS);
2605 pm_runtime_use_autosuspend(dev);
2606 pm_runtime_enable(dev);
2607
2608 ret = stm32_adc_hw_start(dev);
2609 if (ret)
2610 goto err_buffer_cleanup;
2611
2612 ret = iio_device_register(indio_dev);
2613 if (ret) {
2614 dev_err(&pdev->dev, "iio dev register failed\n");
2615 goto err_hw_stop;
2616 }
2617
2618 pm_runtime_mark_last_busy(dev);
2619 pm_runtime_put_autosuspend(dev);
2620
2621 if (IS_ENABLED(CONFIG_DEBUG_FS))
2622 stm32_adc_debugfs_init(indio_dev);
2623
2624 return 0;
2625
2626 err_hw_stop:
2627 stm32_adc_hw_stop(dev);
2628
2629 err_buffer_cleanup:
2630 pm_runtime_disable(dev);
2631 pm_runtime_set_suspended(dev);
2632 pm_runtime_put_noidle(dev);
2633 iio_triggered_buffer_cleanup(indio_dev);
2634
2635 err_dma_disable:
2636 if (adc->dma_chan) {
2637 dma_free_coherent(adc->dma_chan->device->dev,
2638 STM32_DMA_BUFFER_SIZE,
2639 adc->rx_buf, adc->rx_dma_buf);
2640 dma_release_channel(adc->dma_chan);
2641 }
2642
2643 return ret;
2644 }
2645
2646 static void stm32_adc_remove(struct platform_device *pdev)
2647 {
2648 struct iio_dev *indio_dev = platform_get_drvdata(pdev);
2649 struct stm32_adc *adc = iio_priv(indio_dev);
2650
2651 pm_runtime_get_sync(&pdev->dev);
2652 /* iio_device_unregister() also removes debugfs entries */
2653 iio_device_unregister(indio_dev);
2654 stm32_adc_hw_stop(&pdev->dev);
2655 pm_runtime_disable(&pdev->dev);
2656 pm_runtime_set_suspended(&pdev->dev);
2657 pm_runtime_put_noidle(&pdev->dev);
2658 iio_triggered_buffer_cleanup(indio_dev);
2659 if (adc->dma_chan) {
2660 dma_free_coherent(adc->dma_chan->device->dev,
2661 STM32_DMA_BUFFER_SIZE,
2662 adc->rx_buf, adc->rx_dma_buf);
2663 dma_release_channel(adc->dma_chan);
2664 }
2665 }
2666
2667 static int stm32_adc_suspend(struct device *dev)
2668 {
2669 struct iio_dev *indio_dev = dev_get_drvdata(dev);
2670
2671 if (iio_buffer_enabled(indio_dev))
2672 stm32_adc_buffer_predisable(indio_dev);
2673
2674 return pm_runtime_force_suspend(dev);
2675 }
2676
2677 static int stm32_adc_resume(struct device *dev)
2678 {
2679 struct iio_dev *indio_dev = dev_get_drvdata(dev);
2680 int ret;
2681
2682 ret = pm_runtime_force_resume(dev);
2683 if (ret < 0)
2684 return ret;
2685
2686 if (!iio_buffer_enabled(indio_dev))
2687 return 0;
2688
2689 ret = stm32_adc_update_scan_mode(indio_dev,
2690 indio_dev->active_scan_mask);
2691 if (ret < 0)
2692 return ret;
2693
2694 return stm32_adc_buffer_postenable(indio_dev);
2695 }
2696
2697 static int stm32_adc_runtime_suspend(struct device *dev)
2698 {
2699 return stm32_adc_hw_stop(dev);
2700 }
2701
2702 static int stm32_adc_runtime_resume(struct device *dev)
2703 {
2704 return stm32_adc_hw_start(dev);
2705 }
2706
2707 static const struct dev_pm_ops stm32_adc_pm_ops = {
2708 SYSTEM_SLEEP_PM_OPS(stm32_adc_suspend, stm32_adc_resume)
2709 RUNTIME_PM_OPS(stm32_adc_runtime_suspend, stm32_adc_runtime_resume,
2710 NULL)
2711 };
2712
2713 static const struct stm32_adc_cfg stm32f4_adc_cfg = {
2714 .regs = &stm32f4_adc_regspec,
2715 .adc_info = &stm32f4_adc_info,
2716 .trigs = stm32f4_adc_trigs,
2717 .clk_required = true,
2718 .start_conv = stm32f4_adc_start_conv,
2719 .stop_conv = stm32f4_adc_stop_conv,
2720 .smp_cycles = stm32f4_adc_smp_cycles,
2721 .irq_clear = stm32f4_adc_irq_clear,
2722 };
2723
2724 static const unsigned int stm32_adc_min_ts_h7[] = { 0, 0, 0, 4300, 9000 };
2725 static_assert(ARRAY_SIZE(stm32_adc_min_ts_h7) == STM32_ADC_INT_CH_NB);
2726
2727 static const struct stm32_adc_cfg stm32h7_adc_cfg = {
2728 .regs = &stm32h7_adc_regspec,
2729 .adc_info = &stm32h7_adc_info,
2730 .trigs = stm32h7_adc_trigs,
2731 .has_boostmode = true,
2732 .has_linearcal = true,
2733 .has_presel = true,
2734 .has_oversampling = true,
2735 .start_conv = stm32h7_adc_start_conv,
2736 .stop_conv = stm32h7_adc_stop_conv,
2737 .prepare = stm32h7_adc_prepare,
2738 .unprepare = stm32h7_adc_unprepare,
2739 .smp_cycles = stm32h7_adc_smp_cycles,
2740 .irq_clear = stm32h7_adc_irq_clear,
2741 .ts_int_ch = stm32_adc_min_ts_h7,
2742 .set_ovs = stm32h7_adc_set_ovs,
2743 };
2744
2745 static const unsigned int stm32_adc_min_ts_mp1[] = { 100, 100, 100, 4300, 9800 };
2746 static_assert(ARRAY_SIZE(stm32_adc_min_ts_mp1) == STM32_ADC_INT_CH_NB);
2747
2748 static const struct stm32_adc_cfg stm32mp1_adc_cfg = {
2749 .regs = &stm32mp1_adc_regspec,
2750 .adc_info = &stm32h7_adc_info,
2751 .trigs = stm32h7_adc_trigs,
2752 .has_vregready = true,
2753 .has_boostmode = true,
2754 .has_linearcal = true,
2755 .has_presel = true,
2756 .has_oversampling = true,
2757 .start_conv = stm32h7_adc_start_conv,
2758 .stop_conv = stm32h7_adc_stop_conv,
2759 .prepare = stm32h7_adc_prepare,
2760 .unprepare = stm32h7_adc_unprepare,
2761 .smp_cycles = stm32h7_adc_smp_cycles,
2762 .irq_clear = stm32h7_adc_irq_clear,
2763 .ts_int_ch = stm32_adc_min_ts_mp1,
2764 .set_ovs = stm32h7_adc_set_ovs,
2765 };
2766
2767 static const unsigned int stm32_adc_min_ts_mp13[] = { 100, 0, 0, 4300, 9800 };
2768 static_assert(ARRAY_SIZE(stm32_adc_min_ts_mp13) == STM32_ADC_INT_CH_NB);
2769
2770 static const struct stm32_adc_cfg stm32mp13_adc_cfg = {
2771 .regs = &stm32mp13_adc_regspec,
2772 .adc_info = &stm32mp13_adc_info,
2773 .trigs = stm32h7_adc_trigs,
2774 .has_oversampling = true,
2775 .start_conv = stm32mp13_adc_start_conv,
2776 .stop_conv = stm32h7_adc_stop_conv,
2777 .prepare = stm32h7_adc_prepare,
2778 .unprepare = stm32h7_adc_unprepare,
2779 .smp_cycles = stm32mp13_adc_smp_cycles,
2780 .irq_clear = stm32h7_adc_irq_clear,
2781 .ts_int_ch = stm32_adc_min_ts_mp13,
2782 .set_ovs = stm32mp13_adc_set_ovs,
2783 };
2784
2785 static const struct of_device_id stm32_adc_of_match[] = {
2786 { .compatible = "st,stm32f4-adc", .data = (void *)&stm32f4_adc_cfg },
2787 { .compatible = "st,stm32h7-adc", .data = (void *)&stm32h7_adc_cfg },
2788 { .compatible = "st,stm32mp1-adc", .data = (void *)&stm32mp1_adc_cfg },
2789 { .compatible = "st,stm32mp13-adc", .data = (void *)&stm32mp13_adc_cfg },
2790 { }
2791 };
2792 MODULE_DEVICE_TABLE(of, stm32_adc_of_match);
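
/*
 * Illustrative device tree instance node matching the compatibles above
 * (values are examples only, see the st,stm32-adc bindings for the
 * authoritative description):
 *
 *	adc1: adc@0 {
 *		compatible = "st,stm32h7-adc";
 *		reg = <0x0>;
 *		interrupts = <0>;
 *		st,adc-channels = <0 1>;
 *		st,min-sample-time-nsecs = <5000>;
 *	};
 */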
2793
2794 static struct platform_driver stm32_adc_driver = {
2795 .probe = stm32_adc_probe,
2796 .remove = stm32_adc_remove,
2797 .driver = {
2798 .name = "stm32-adc",
2799 .of_match_table = stm32_adc_of_match,
2800 .pm = pm_ptr(&stm32_adc_pm_ops),
2801 },
2802 };
2803 module_platform_driver(stm32_adc_driver);
2804
2805 MODULE_AUTHOR("Fabrice Gasnier <fabrice.gasnier@st.com>");
2806 MODULE_DESCRIPTION("STMicroelectronics STM32 ADC IIO driver");
2807 MODULE_LICENSE("GPL v2");
2808 MODULE_ALIAS("platform:stm32-adc");
2809