xref: /linux/drivers/spi/spi-stm32.c (revision 0262163136de813894cb172aa8ccf762b92e5fd7)
1 // SPDX-License-Identifier: GPL-2.0
2 //
3 // STMicroelectronics STM32 SPI Controller driver
4 //
5 // Copyright (C) 2017, STMicroelectronics - All Rights Reserved
6 // Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics.
7 
8 #include <linux/bitfield.h>
9 #include <linux/debugfs.h>
10 #include <linux/clk.h>
11 #include <linux/delay.h>
12 #include <linux/dma-mapping.h>
13 #include <linux/dmaengine.h>
14 #include <linux/genalloc.h>
15 #include <linux/interrupt.h>
16 #include <linux/iopoll.h>
17 #include <linux/module.h>
18 #include <linux/of.h>
19 #include <linux/platform_device.h>
20 #include <linux/pinctrl/consumer.h>
21 #include <linux/pm_runtime.h>
22 #include <linux/reset.h>
23 #include <linux/spi/spi.h>
24 
25 #define DRIVER_NAME "spi_stm32"
26 
27 /* STM32F4/7 SPI registers */
28 #define STM32FX_SPI_CR1			0x00
29 #define STM32FX_SPI_CR2			0x04
30 #define STM32FX_SPI_SR			0x08
31 #define STM32FX_SPI_DR			0x0C
32 #define STM32FX_SPI_I2SCFGR		0x1C
33 
34 /* STM32FX_SPI_CR1 bit fields */
35 #define STM32FX_SPI_CR1_CPHA		BIT(0)
36 #define STM32FX_SPI_CR1_CPOL		BIT(1)
37 #define STM32FX_SPI_CR1_MSTR		BIT(2)
38 #define STM32FX_SPI_CR1_BR_SHIFT	3
39 #define STM32FX_SPI_CR1_BR		GENMASK(5, 3)
40 #define STM32FX_SPI_CR1_SPE		BIT(6)
41 #define STM32FX_SPI_CR1_LSBFRST		BIT(7)
42 #define STM32FX_SPI_CR1_SSI		BIT(8)
43 #define STM32FX_SPI_CR1_SSM		BIT(9)
44 #define STM32FX_SPI_CR1_RXONLY		BIT(10)
45 #define STM32F4_SPI_CR1_DFF		BIT(11)
46 #define STM32F7_SPI_CR1_CRCL		BIT(11)
47 #define STM32FX_SPI_CR1_CRCNEXT		BIT(12)
48 #define STM32FX_SPI_CR1_CRCEN		BIT(13)
49 #define STM32FX_SPI_CR1_BIDIOE		BIT(14)
50 #define STM32FX_SPI_CR1_BIDIMODE	BIT(15)
51 #define STM32FX_SPI_CR1_BR_MIN		0
52 #define STM32FX_SPI_CR1_BR_MAX		(GENMASK(5, 3) >> 3)
53 
54 /* STM32FX_SPI_CR2 bit fields */
55 #define STM32FX_SPI_CR2_RXDMAEN		BIT(0)
56 #define STM32FX_SPI_CR2_TXDMAEN		BIT(1)
57 #define STM32FX_SPI_CR2_SSOE		BIT(2)
58 #define STM32FX_SPI_CR2_FRF		BIT(4)
59 #define STM32FX_SPI_CR2_ERRIE		BIT(5)
60 #define STM32FX_SPI_CR2_RXNEIE		BIT(6)
61 #define STM32FX_SPI_CR2_TXEIE		BIT(7)
62 #define STM32F7_SPI_CR2_DS		GENMASK(11, 8)
63 #define STM32F7_SPI_CR2_FRXTH		BIT(12)
64 #define STM32F7_SPI_CR2_LDMA_RX		BIT(13)
65 #define STM32F7_SPI_CR2_LDMA_TX		BIT(14)
66 
67 /* STM32FX_SPI_SR bit fields */
68 #define STM32FX_SPI_SR_RXNE		BIT(0)
69 #define STM32FX_SPI_SR_TXE		BIT(1)
70 #define STM32FX_SPI_SR_CHSIDE		BIT(2)
71 #define STM32FX_SPI_SR_UDR		BIT(3)
72 #define STM32FX_SPI_SR_CRCERR		BIT(4)
73 #define STM32FX_SPI_SR_MODF		BIT(5)
74 #define STM32FX_SPI_SR_OVR		BIT(6)
75 #define STM32FX_SPI_SR_BSY		BIT(7)
76 #define STM32FX_SPI_SR_FRE		BIT(8)
77 #define STM32F7_SPI_SR_FRLVL		GENMASK(10, 9)
78 #define STM32F7_SPI_SR_FTLVL		GENMASK(12, 11)
79 
80 /* STM32FX_SPI_I2SCFGR bit fields */
81 #define STM32FX_SPI_I2SCFGR_I2SMOD	BIT(11)
82 
83 /* STM32F4 SPI Baud Rate min/max divisor */
84 #define STM32FX_SPI_BR_DIV_MIN		(2 << STM32FX_SPI_CR1_BR_MIN)
85 #define STM32FX_SPI_BR_DIV_MAX		(2 << STM32FX_SPI_CR1_BR_MAX)
86 
87 /* STM32H7 SPI registers */
88 #define STM32H7_SPI_CR1			0x00
89 #define STM32H7_SPI_CR2			0x04
90 #define STM32H7_SPI_CFG1		0x08
91 #define STM32H7_SPI_CFG2		0x0C
92 #define STM32H7_SPI_IER			0x10
93 #define STM32H7_SPI_SR			0x14
94 #define STM32H7_SPI_IFCR		0x18
95 #define STM32H7_SPI_TXDR		0x20
96 #define STM32H7_SPI_RXDR		0x30
97 #define STM32H7_SPI_I2SCFGR		0x50
98 
99 /* STM32H7_SPI_CR1 bit fields */
100 #define STM32H7_SPI_CR1_SPE		BIT(0)
101 #define STM32H7_SPI_CR1_MASRX		BIT(8)
102 #define STM32H7_SPI_CR1_CSTART		BIT(9)
103 #define STM32H7_SPI_CR1_CSUSP		BIT(10)
104 #define STM32H7_SPI_CR1_HDDIR		BIT(11)
105 #define STM32H7_SPI_CR1_SSI		BIT(12)
106 
107 /* STM32H7_SPI_CR2 bit fields */
108 #define STM32H7_SPI_CR2_TSIZE		GENMASK(15, 0)
109 #define STM32H7_SPI_TSIZE_MAX		GENMASK(15, 0)
110 
111 /* STM32H7_SPI_CFG1 bit fields */
112 #define STM32H7_SPI_CFG1_DSIZE		GENMASK(4, 0)
113 #define STM32H7_SPI_CFG1_FTHLV		GENMASK(8, 5)
114 #define STM32H7_SPI_CFG1_RXDMAEN	BIT(14)
115 #define STM32H7_SPI_CFG1_TXDMAEN	BIT(15)
116 #define STM32H7_SPI_CFG1_MBR		GENMASK(30, 28)
117 #define STM32H7_SPI_CFG1_MBR_SHIFT	28
118 #define STM32H7_SPI_CFG1_MBR_MIN	0
119 #define STM32H7_SPI_CFG1_MBR_MAX	(GENMASK(30, 28) >> 28)
120 
121 /* STM32H7_SPI_CFG2 bit fields */
122 #define STM32H7_SPI_CFG2_MIDI		GENMASK(7, 4)
123 #define STM32H7_SPI_CFG2_COMM		GENMASK(18, 17)
124 #define STM32H7_SPI_CFG2_SP		GENMASK(21, 19)
125 #define STM32H7_SPI_CFG2_MASTER		BIT(22)
126 #define STM32H7_SPI_CFG2_LSBFRST	BIT(23)
127 #define STM32H7_SPI_CFG2_CPHA		BIT(24)
128 #define STM32H7_SPI_CFG2_CPOL		BIT(25)
129 #define STM32H7_SPI_CFG2_SSM		BIT(26)
130 #define STM32H7_SPI_CFG2_SSIOP		BIT(28)
131 #define STM32H7_SPI_CFG2_AFCNTR		BIT(31)
132 
133 /* STM32H7_SPI_IER bit fields */
134 #define STM32H7_SPI_IER_RXPIE		BIT(0)
135 #define STM32H7_SPI_IER_TXPIE		BIT(1)
136 #define STM32H7_SPI_IER_DXPIE		BIT(2)
137 #define STM32H7_SPI_IER_EOTIE		BIT(3)
138 #define STM32H7_SPI_IER_TXTFIE		BIT(4)
139 #define STM32H7_SPI_IER_OVRIE		BIT(6)
140 #define STM32H7_SPI_IER_MODFIE		BIT(9)
141 #define STM32H7_SPI_IER_ALL		GENMASK(10, 0)
142 
143 /* STM32H7_SPI_SR bit fields */
144 #define STM32H7_SPI_SR_RXP		BIT(0)
145 #define STM32H7_SPI_SR_TXP		BIT(1)
146 #define STM32H7_SPI_SR_EOT		BIT(3)
147 #define STM32H7_SPI_SR_OVR		BIT(6)
148 #define STM32H7_SPI_SR_MODF		BIT(9)
149 #define STM32H7_SPI_SR_SUSP		BIT(11)
150 #define STM32H7_SPI_SR_RXPLVL		GENMASK(14, 13)
151 #define STM32H7_SPI_SR_RXWNE		BIT(15)
152 
153 /* STM32H7_SPI_IFCR bit fields */
154 #define STM32H7_SPI_IFCR_ALL		GENMASK(11, 3)
155 
156 /* STM32H7_SPI_I2SCFGR bit fields */
157 #define STM32H7_SPI_I2SCFGR_I2SMOD	BIT(0)
158 
159 /* STM32MP25_SPICFG2 bit fields */
160 #define STM32MP25_SPI_CFG2_RDIOM	BIT(13)
161 
162 /* STM32MP25 SPI registers bit fields */
163 #define STM32MP25_SPI_HWCFGR1			0x3F0
164 
165 /* STM32MP25_SPI_CR2 bit fields */
166 #define STM32MP25_SPI_TSIZE_MAX_LIMITED		GENMASK(9, 0)
167 
168 /* STM32MP25_SPI_HWCFGR1 */
169 #define STM32MP25_SPI_HWCFGR1_FULLCFG		GENMASK(27, 24)
170 #define STM32MP25_SPI_HWCFGR1_FULLCFG_LIMITED	0x0
171 #define STM32MP25_SPI_HWCFGR1_FULLCFG_FULL	0x1
172 #define STM32MP25_SPI_HWCFGR1_DSCFG		GENMASK(19, 16)
173 #define STM32MP25_SPI_HWCFGR1_DSCFG_16_B	0x0
174 #define STM32MP25_SPI_HWCFGR1_DSCFG_32_B	0x1
175 
176 /* STM32H7 SPI Master Baud Rate min/max divisor */
177 #define STM32H7_SPI_MBR_DIV_MIN		(2 << STM32H7_SPI_CFG1_MBR_MIN)
178 #define STM32H7_SPI_MBR_DIV_MAX		(2 << STM32H7_SPI_CFG1_MBR_MAX)
179 
180 /* STM32H7 SPI Communication mode */
181 #define STM32H7_SPI_FULL_DUPLEX		0
182 #define STM32H7_SPI_SIMPLEX_TX		1
183 #define STM32H7_SPI_SIMPLEX_RX		2
184 #define STM32H7_SPI_HALF_DUPLEX		3
185 
186 /* SPI Communication type */
187 #define SPI_FULL_DUPLEX		0
188 #define SPI_SIMPLEX_TX		1
189 #define SPI_SIMPLEX_RX		2
190 #define SPI_3WIRE_TX		3
191 #define SPI_3WIRE_RX		4
192 
193 #define STM32_SPI_AUTOSUSPEND_DELAY		1	/* 1 ms */
194 
195 /*
196  * use PIO for small transfers, avoiding DMA setup/teardown overhead for drivers
197  * without fifo buffers.
198  */
199 #define SPI_DMA_MIN_BYTES	16
200 
201 /* STM32 SPI driver helpers */
202 #define STM32_SPI_HOST_MODE(stm32_spi) (!(stm32_spi)->device_mode)
203 #define STM32_SPI_DEVICE_MODE(stm32_spi) ((stm32_spi)->device_mode)
204 
/**
 * struct stm32_spi_reg - stm32 SPI register & bitfield desc
 * @reg:		register offset
 * @mask:		bitfield mask
 * @shift:		left shift
 *
 * NOTE: field order matters — the regspec tables below initialize this
 * struct positionally (reg, mask, shift).
 */
struct stm32_spi_reg {
	int reg;
	int mask;
	int shift;
};
216 
/**
 * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data
 * @en: enable register and SPI enable bit
 * @dma_rx_en: SPI DMA RX enable register and SPI DMA RX enable bit
 * @dma_tx_en: SPI DMA TX enable register and SPI DMA TX enable bit
 * @cpol: clock polarity register and polarity bit
 * @cpha: clock phase register and phase bit
 * @lsb_first: LSB transmitted first register and bit
 * @cs_high: chip select active value
 * @br: baud rate register and bitfields
 * @rx: SPI RX data register
 * @tx: SPI TX data register
 * @fullcfg: SPI full or limited feature set register
 * @rdy_en: SPI ready feature register
 */
struct stm32_spi_regspec {
	const struct stm32_spi_reg en;
	const struct stm32_spi_reg dma_rx_en;
	const struct stm32_spi_reg dma_tx_en;
	const struct stm32_spi_reg cpol;
	const struct stm32_spi_reg cpha;
	const struct stm32_spi_reg lsb_first;
	const struct stm32_spi_reg cs_high;
	const struct stm32_spi_reg br;
	const struct stm32_spi_reg rx;
	const struct stm32_spi_reg tx;
	const struct stm32_spi_reg fullcfg;
	const struct stm32_spi_reg rdy_en;
};
246 
247 struct stm32_spi;
248 
/**
 * struct stm32_spi_cfg - stm32 compatible configuration data
 * @regs: registers descriptions
 * @get_fifo_size: routine to get fifo size
 * @get_bpw_mask: routine to get bits per word mask
 * @disable: routine to disable controller
 * @config: routine to configure controller as SPI Host
 * @set_bpw: routine to configure registers for bits per word
 * @set_mode: routine to configure registers to desired mode
 * @set_data_idleness: optional routine to configure registers to desired idle
 * time between frames (if driver has this functionality)
 * @set_number_of_data: optional routine to configure registers to desired
 * number of data (if driver has this functionality)
 * @write_tx: routine to write to transmit register/FIFO
 * @read_rx: routine to read from receive register/FIFO
 * @transfer_one_dma_start: routine to start transfer a single spi_transfer
 * using DMA
 * @dma_rx_cb: routine to call after DMA RX channel operation is complete
 * @dma_tx_cb: routine to call after DMA TX channel operation is complete
 * @transfer_one_irq: routine to configure interrupts for driver
 * @irq_handler_event: Interrupt handler for SPI controller events
 * @irq_handler_thread: thread of interrupt handler for SPI controller
 * @baud_rate_div_min: minimum baud rate divisor
 * @baud_rate_div_max: maximum baud rate divisor
 * @has_fifo: boolean to know if fifo is used for driver
 * @has_device_mode: is this compatible capable to switch on device mode
 * @flags: compatible specific SPI controller flags used at registration time
 * @prevent_dma_burst: boolean to indicate to prevent DMA burst
 */
struct stm32_spi_cfg {
	const struct stm32_spi_regspec *regs;
	int (*get_fifo_size)(struct stm32_spi *spi);
	int (*get_bpw_mask)(struct stm32_spi *spi);
	void (*disable)(struct stm32_spi *spi);
	int (*config)(struct stm32_spi *spi);
	void (*set_bpw)(struct stm32_spi *spi);
	int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type);
	void (*set_data_idleness)(struct stm32_spi *spi, struct spi_transfer *xfer);
	int (*set_number_of_data)(struct stm32_spi *spi, u32 length);
	void (*write_tx)(struct stm32_spi *spi);
	void (*read_rx)(struct stm32_spi *spi);
	void (*transfer_one_dma_start)(struct stm32_spi *spi);
	void (*dma_rx_cb)(void *data);
	void (*dma_tx_cb)(void *data);
	int (*transfer_one_irq)(struct stm32_spi *spi);
	irqreturn_t (*irq_handler_event)(int irq, void *dev_id);
	irqreturn_t (*irq_handler_thread)(int irq, void *dev_id);
	unsigned int baud_rate_div_min;
	unsigned int baud_rate_div_max;
	bool has_fifo;
	bool has_device_mode;
	u16 flags;
	bool prevent_dma_burst;
};
303 
/**
 * struct stm32_spi - private data of the SPI controller
 * @dev: driver model representation of the controller
 * @ctrl: controller interface
 * @cfg: compatible configuration data
 * @base: virtual memory area
 * @clk: hw kernel clock feeding the SPI clock generator
 * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
 * @lock: prevent I/O concurrent access
 * @irq: SPI controller interrupt line
 * @fifo_size: size of the embedded fifo in bytes
 * @t_size_max: maximum number of data of one transfer
 * @feature_set: SPI full or limited feature set
 * @cur_midi: host inter-data idleness in ns
 * @cur_speed: speed configured in Hz
 * @cur_half_period: time of a half bit in us
 * @cur_bpw: number of bits in a single SPI data frame
 * @cur_fthlv: fifo threshold level (data frames in a single data packet)
 * @cur_comm: SPI communication mode
 * @cur_xferlen: current transfer length in bytes
 * @cur_usedma: boolean to know if dma is used in current transfer
 * @tx_buf: data to be written, or NULL
 * @rx_buf: data to be read, or NULL
 * @tx_len: number of data to be written in bytes
 * @rx_len: number of data to be read in bytes
 * @dma_tx: dma channel for TX transfer
 * @dma_rx: dma channel for RX transfer
 * @phys_addr: SPI registers physical base address
 * @device_mode: the controller is configured as SPI device
 * @sram_pool: SRAM pool for DMA transfers
 * @sram_rx_buf_size: size of SRAM buffer for RX transfer
 * @sram_rx_buf: SRAM buffer for RX transfer
 * @sram_dma_rx_buf: SRAM buffer physical address for RX transfer
 * @mdma_rx: MDMA channel for RX transfer
 */
struct stm32_spi {
	struct device *dev;
	struct spi_controller *ctrl;
	const struct stm32_spi_cfg *cfg;
	void __iomem *base;
	struct clk *clk;
	u32 clk_rate;
	spinlock_t lock; /* prevent I/O concurrent access */
	int irq;
	unsigned int fifo_size;
	unsigned int t_size_max;
	unsigned int feature_set;
/* feature_set values mirror the HWCFGR1 FULLCFG encoding read at probe */
#define STM32_SPI_FEATURE_LIMITED	STM32MP25_SPI_HWCFGR1_FULLCFG_LIMITED	/* 0x0 */
#define STM32_SPI_FEATURE_FULL		STM32MP25_SPI_HWCFGR1_FULLCFG_FULL	/* 0x1 */

	unsigned int cur_midi;
	unsigned int cur_speed;
	unsigned int cur_half_period;
	unsigned int cur_bpw;
	unsigned int cur_fthlv;
	unsigned int cur_comm;
	unsigned int cur_xferlen;
	bool cur_usedma;

	const void *tx_buf;
	void *rx_buf;
	int tx_len;
	int rx_len;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	dma_addr_t phys_addr;

	bool device_mode;

	struct gen_pool *sram_pool;
	size_t sram_rx_buf_size;
	void *sram_rx_buf;
	dma_addr_t sram_dma_rx_buf;
	struct dma_chan *mdma_rx;
};
379 
380 static const struct stm32_spi_regspec stm32fx_spi_regspec = {
381 	.en = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_SPE },
382 
383 	.dma_rx_en = { STM32FX_SPI_CR2, STM32FX_SPI_CR2_RXDMAEN },
384 	.dma_tx_en = { STM32FX_SPI_CR2, STM32FX_SPI_CR2_TXDMAEN },
385 
386 	.cpol = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_CPOL },
387 	.cpha = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_CPHA },
388 	.lsb_first = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_LSBFRST },
389 	.cs_high = {},
390 	.br = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_BR, STM32FX_SPI_CR1_BR_SHIFT },
391 
392 	.rx = { STM32FX_SPI_DR },
393 	.tx = { STM32FX_SPI_DR },
394 };
395 
396 static const struct stm32_spi_regspec stm32h7_spi_regspec = {
397 	/* SPI data transfer is enabled but spi_ker_ck is idle.
398 	 * CFG1 and CFG2 registers are write protected when SPE is enabled.
399 	 */
400 	.en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },
401 
402 	.dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
403 	.dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },
404 
405 	.cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
406 	.cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
407 	.lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
408 	.cs_high = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_SSIOP },
409 	.br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
410 		STM32H7_SPI_CFG1_MBR_SHIFT },
411 
412 	.rx = { STM32H7_SPI_RXDR },
413 	.tx = { STM32H7_SPI_TXDR },
414 };
415 
416 static const struct stm32_spi_regspec stm32mp25_spi_regspec = {
417 	/* SPI data transfer is enabled but spi_ker_ck is idle.
418 	 * CFG1 and CFG2 registers are write protected when SPE is enabled.
419 	 */
420 	.en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },
421 
422 	.dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
423 	.dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },
424 
425 	.cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
426 	.cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
427 	.lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
428 	.cs_high = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_SSIOP },
429 	.br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
430 		STM32H7_SPI_CFG1_MBR_SHIFT },
431 
432 	.rx = { STM32H7_SPI_RXDR },
433 	.tx = { STM32H7_SPI_TXDR },
434 
435 	.fullcfg = { STM32MP25_SPI_HWCFGR1, STM32MP25_SPI_HWCFGR1_FULLCFG },
436 
437 	.rdy_en = { STM32H7_SPI_CFG2, STM32MP25_SPI_CFG2_RDIOM },
438 };
439 
/* Set @bits in the register at @offset (read-modify-write, relaxed MMIO). */
static inline void stm32_spi_set_bits(struct stm32_spi *spi,
				      u32 offset, u32 bits)
{
	u32 val = readl_relaxed(spi->base + offset);

	writel_relaxed(val | bits, spi->base + offset);
}
446 
/* Clear @bits in the register at @offset (read-modify-write, relaxed MMIO). */
static inline void stm32_spi_clr_bits(struct stm32_spi *spi,
				      u32 offset, u32 bits)
{
	u32 val = readl_relaxed(spi->base + offset);

	writel_relaxed(val & ~bits, spi->base + offset);
}
453 
454 /**
455  * stm32h7_spi_get_fifo_size - Return fifo size
456  * @spi: pointer to the spi controller data structure
457  */
stm32h7_spi_get_fifo_size(struct stm32_spi * spi)458 static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi)
459 {
460 	unsigned long flags;
461 	u32 count = 0;
462 
463 	spin_lock_irqsave(&spi->lock, flags);
464 
465 	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
466 
467 	while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP)
468 		writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR);
469 
470 	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);
471 
472 	spin_unlock_irqrestore(&spi->lock, flags);
473 
474 	dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count);
475 
476 	return count;
477 }
478 
479 /**
480  * stm32f4_spi_get_bpw_mask - Return bits per word mask
481  * @spi: pointer to the spi controller data structure
482  */
stm32f4_spi_get_bpw_mask(struct stm32_spi * spi)483 static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi)
484 {
485 	dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n");
486 	return SPI_BPW_MASK(8) | SPI_BPW_MASK(16);
487 }
488 
489 /**
490  * stm32f7_spi_get_bpw_mask - Return bits per word mask
491  * @spi: pointer to the spi controller data structure
492  */
stm32f7_spi_get_bpw_mask(struct stm32_spi * spi)493 static int stm32f7_spi_get_bpw_mask(struct stm32_spi *spi)
494 {
495 	dev_dbg(spi->dev, "16-bit maximum data frame\n");
496 	return SPI_BPW_RANGE_MASK(4, 16);
497 }
498 
499 /**
500  * stm32h7_spi_get_bpw_mask - Return bits per word mask
501  * @spi: pointer to the spi controller data structure
502  */
stm32h7_spi_get_bpw_mask(struct stm32_spi * spi)503 static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi)
504 {
505 	unsigned long flags;
506 	u32 cfg1, max_bpw;
507 
508 	spin_lock_irqsave(&spi->lock, flags);
509 
510 	/*
511 	 * The most significant bit at DSIZE bit field is reserved when the
512 	 * maximum data size of periperal instances is limited to 16-bit
513 	 */
514 	stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE);
515 
516 	cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1);
517 	max_bpw = FIELD_GET(STM32H7_SPI_CFG1_DSIZE, cfg1) + 1;
518 
519 	spin_unlock_irqrestore(&spi->lock, flags);
520 
521 	dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);
522 
523 	return SPI_BPW_RANGE_MASK(4, max_bpw);
524 }
525 
526 /**
527  * stm32mp25_spi_get_bpw_mask - Return bits per word mask
528  * @spi: pointer to the spi controller data structure
529  */
stm32mp25_spi_get_bpw_mask(struct stm32_spi * spi)530 static int stm32mp25_spi_get_bpw_mask(struct stm32_spi *spi)
531 {
532 	u32 dscfg, max_bpw;
533 
534 	if (spi->feature_set == STM32_SPI_FEATURE_LIMITED) {
535 		dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n");
536 		return SPI_BPW_MASK(8) | SPI_BPW_MASK(16);
537 	}
538 
539 	dscfg = FIELD_GET(STM32MP25_SPI_HWCFGR1_DSCFG,
540 			  readl_relaxed(spi->base + STM32MP25_SPI_HWCFGR1));
541 	max_bpw = 16;
542 	if (dscfg == STM32MP25_SPI_HWCFGR1_DSCFG_32_B)
543 		max_bpw = 32;
544 	dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);
545 	return SPI_BPW_RANGE_MASK(4, max_bpw);
546 }
547 
548 /**
549  * stm32_spi_prepare_mbr - Determine baud rate divisor value
550  * @spi: pointer to the spi controller data structure
551  * @speed_hz: requested speed
552  * @min_div: minimum baud rate divisor
553  * @max_div: maximum baud rate divisor
554  *
555  * Return baud rate divisor value in case of success or -EINVAL
556  */
stm32_spi_prepare_mbr(struct stm32_spi * spi,u32 speed_hz,u32 min_div,u32 max_div)557 static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz,
558 				 u32 min_div, u32 max_div)
559 {
560 	u32 div, mbrdiv;
561 
562 	/* Ensure spi->clk_rate is even */
563 	div = DIV_ROUND_CLOSEST(spi->clk_rate & ~0x1, speed_hz);
564 
565 	/*
566 	 * SPI framework set xfer->speed_hz to ctrl->max_speed_hz if
567 	 * xfer->speed_hz is greater than ctrl->max_speed_hz, and it returns
568 	 * an error when xfer->speed_hz is lower than ctrl->min_speed_hz, so
569 	 * no need to check it there.
570 	 * However, we need to ensure the following calculations.
571 	 */
572 	if ((div < min_div) || (div > max_div))
573 		return -EINVAL;
574 
575 	/* Determine the first power of 2 greater than or equal to div */
576 	if (div & (div - 1))
577 		mbrdiv = fls(div);
578 	else
579 		mbrdiv = fls(div) - 1;
580 
581 	spi->cur_speed = spi->clk_rate / (1 << mbrdiv);
582 
583 	spi->cur_half_period = DIV_ROUND_CLOSEST(USEC_PER_SEC, 2 * spi->cur_speed);
584 
585 	return mbrdiv - 1;
586 }
587 
588 /**
589  * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level
590  * @spi: pointer to the spi controller data structure
591  * @xfer_len: length of the message to be transferred
592  */
stm32h7_spi_prepare_fthlv(struct stm32_spi * spi,u32 xfer_len)593 static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len)
594 {
595 	u32 packet, bpw;
596 
597 	/* data packet should not exceed 1/2 of fifo space */
598 	packet = clamp(xfer_len, 1U, spi->fifo_size / 2);
599 
600 	/* align packet size with data registers access */
601 	bpw = DIV_ROUND_UP(spi->cur_bpw, 8);
602 	return DIV_ROUND_UP(packet, bpw);
603 }
604 
605 /**
606  * stm32f4_spi_write_tx - Write bytes to Transmit Data Register
607  * @spi: pointer to the spi controller data structure
608  *
609  * Read from tx_buf depends on remaining bytes to avoid to read beyond
610  * tx_buf end.
611  */
stm32f4_spi_write_tx(struct stm32_spi * spi)612 static void stm32f4_spi_write_tx(struct stm32_spi *spi)
613 {
614 	if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32FX_SPI_SR) &
615 				  STM32FX_SPI_SR_TXE)) {
616 		u32 offs = spi->cur_xferlen - spi->tx_len;
617 
618 		if (spi->cur_bpw == 16) {
619 			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
620 
621 			writew_relaxed(*tx_buf16, spi->base + STM32FX_SPI_DR);
622 			spi->tx_len -= sizeof(u16);
623 		} else {
624 			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
625 
626 			writeb_relaxed(*tx_buf8, spi->base + STM32FX_SPI_DR);
627 			spi->tx_len -= sizeof(u8);
628 		}
629 	}
630 
631 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
632 }
633 
634 /**
635  * stm32f7_spi_write_tx - Write bytes to Transmit Data Register
636  * @spi: pointer to the spi controller data structure
637  *
638  * Read from tx_buf depends on remaining bytes to avoid to read beyond
639  * tx_buf end.
640  */
stm32f7_spi_write_tx(struct stm32_spi * spi)641 static void stm32f7_spi_write_tx(struct stm32_spi *spi)
642 {
643 	if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32FX_SPI_SR) &
644 				  STM32FX_SPI_SR_TXE)) {
645 		u32 offs = spi->cur_xferlen - spi->tx_len;
646 
647 		if (spi->tx_len >= sizeof(u16)) {
648 			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
649 
650 			writew_relaxed(*tx_buf16, spi->base + STM32FX_SPI_DR);
651 			spi->tx_len -= sizeof(u16);
652 		} else {
653 			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
654 
655 			writeb_relaxed(*tx_buf8, spi->base + STM32FX_SPI_DR);
656 			spi->tx_len -= sizeof(u8);
657 		}
658 	}
659 
660 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
661 }
662 
663 /**
664  * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register
665  * @spi: pointer to the spi controller data structure
666  *
667  * Read from tx_buf depends on remaining bytes to avoid to read beyond
668  * tx_buf end.
669  */
stm32h7_spi_write_txfifo(struct stm32_spi * spi)670 static void stm32h7_spi_write_txfifo(struct stm32_spi *spi)
671 {
672 	while ((spi->tx_len > 0) &&
673 		       (readl_relaxed(spi->base + STM32H7_SPI_SR) &
674 			STM32H7_SPI_SR_TXP)) {
675 		u32 offs = spi->cur_xferlen - spi->tx_len;
676 
677 		if (spi->tx_len >= sizeof(u32)) {
678 			const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs);
679 
680 			writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR);
681 			spi->tx_len -= sizeof(u32);
682 		} else if (spi->tx_len >= sizeof(u16)) {
683 			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);
684 
685 			writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR);
686 			spi->tx_len -= sizeof(u16);
687 		} else {
688 			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);
689 
690 			writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR);
691 			spi->tx_len -= sizeof(u8);
692 		}
693 	}
694 
695 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
696 }
697 
698 /**
699  * stm32f4_spi_read_rx - Read bytes from Receive Data Register
700  * @spi: pointer to the spi controller data structure
701  *
702  * Write in rx_buf depends on remaining bytes to avoid to write beyond
703  * rx_buf end.
704  */
stm32f4_spi_read_rx(struct stm32_spi * spi)705 static void stm32f4_spi_read_rx(struct stm32_spi *spi)
706 {
707 	if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32FX_SPI_SR) &
708 				  STM32FX_SPI_SR_RXNE)) {
709 		u32 offs = spi->cur_xferlen - spi->rx_len;
710 
711 		if (spi->cur_bpw == 16) {
712 			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
713 
714 			*rx_buf16 = readw_relaxed(spi->base + STM32FX_SPI_DR);
715 			spi->rx_len -= sizeof(u16);
716 		} else {
717 			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
718 
719 			*rx_buf8 = readb_relaxed(spi->base + STM32FX_SPI_DR);
720 			spi->rx_len -= sizeof(u8);
721 		}
722 	}
723 
724 	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len);
725 }
726 
727 /**
728  * stm32f7_spi_read_rx - Read bytes from Receive Data Register
729  * @spi: pointer to the spi controller data structure
730  *
731  * Write in rx_buf depends on remaining bytes to avoid to write beyond
732  * rx_buf end.
733  */
stm32f7_spi_read_rx(struct stm32_spi * spi)734 static void stm32f7_spi_read_rx(struct stm32_spi *spi)
735 {
736 	u32 sr = readl_relaxed(spi->base + STM32FX_SPI_SR);
737 	u32 frlvl = FIELD_GET(STM32F7_SPI_SR_FRLVL, sr);
738 
739 	while ((spi->rx_len > 0) && (frlvl > 0)) {
740 		u32 offs = spi->cur_xferlen - spi->rx_len;
741 
742 		if ((spi->rx_len >= sizeof(u16)) && (frlvl >= 2)) {
743 			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);
744 
745 			*rx_buf16 = readw_relaxed(spi->base + STM32FX_SPI_DR);
746 			spi->rx_len -= sizeof(u16);
747 		} else {
748 			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);
749 
750 			*rx_buf8 = readb_relaxed(spi->base + STM32FX_SPI_DR);
751 			spi->rx_len -= sizeof(u8);
752 		}
753 
754 		sr = readl_relaxed(spi->base + STM32FX_SPI_SR);
755 		frlvl = FIELD_GET(STM32F7_SPI_SR_FRLVL, sr);
756 	}
757 
758 	if (spi->rx_len >= sizeof(u16))
759 		stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH);
760 	else
761 		stm32_spi_set_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH);
762 
763 	dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n",
764 		__func__, spi->rx_len, sr);
765 }
766 
/**
 * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register
 * @spi: pointer to the spi controller data structure
 *
 * Write in rx_buf depends on remaining bytes to avoid to write beyond
 * rx_buf end.
 */
static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi)
{
	u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
	u32 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);

	/*
	 * Keep draining while data remains to be read and either a full
	 * packet is available (RXP) or the transfer has ended (EOT) with
	 * residual data still in the fifo (RXWNE set or RXPLVL > 0).
	 */
	while ((spi->rx_len > 0) &&
	       ((sr & STM32H7_SPI_SR_RXP) ||
		((sr & STM32H7_SPI_SR_EOT) &&
		 ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) {
		u32 offs = spi->cur_xferlen - spi->rx_len;

		/* Use the widest fifo access the remaining data allows */
		if ((spi->rx_len >= sizeof(u32)) ||
		    (sr & STM32H7_SPI_SR_RXWNE)) {
			u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs);

			*rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u32);
		} else if ((spi->rx_len >= sizeof(u16)) ||
			   (!(sr & STM32H7_SPI_SR_RXWNE) &&
			    (rxplvl >= 2 || spi->cur_bpw > 8))) {
			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);

			*rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u16);
		} else {
			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);

			*rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u8);
		}

		/* Refresh status and fifo level for the next iteration */
		sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
		rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr);
	}

	dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n",
		__func__, spi->rx_len, sr);
}
812 
813 /**
814  * stm32_spi_enable - Enable SPI controller
815  * @spi: pointer to the spi controller data structure
816  */
stm32_spi_enable(struct stm32_spi * spi)817 static void stm32_spi_enable(struct stm32_spi *spi)
818 {
819 	dev_dbg(spi->dev, "enable controller\n");
820 
821 	stm32_spi_set_bits(spi, spi->cfg->regs->en.reg,
822 			   spi->cfg->regs->en.mask);
823 }
824 
825 /**
826  * stm32fx_spi_disable - Disable SPI controller
827  * @spi: pointer to the spi controller data structure
828  */
stm32fx_spi_disable(struct stm32_spi * spi)829 static void stm32fx_spi_disable(struct stm32_spi *spi)
830 {
831 	unsigned long flags;
832 	u32 sr;
833 
834 	dev_dbg(spi->dev, "disable controller\n");
835 
836 	spin_lock_irqsave(&spi->lock, flags);
837 
838 	if (!(readl_relaxed(spi->base + STM32FX_SPI_CR1) &
839 	      STM32FX_SPI_CR1_SPE)) {
840 		spin_unlock_irqrestore(&spi->lock, flags);
841 		return;
842 	}
843 
844 	/* Disable interrupts */
845 	stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32FX_SPI_CR2_TXEIE |
846 						 STM32FX_SPI_CR2_RXNEIE |
847 						 STM32FX_SPI_CR2_ERRIE);
848 
849 	/* Wait until BSY = 0 */
850 	if (readl_relaxed_poll_timeout_atomic(spi->base + STM32FX_SPI_SR,
851 					      sr, !(sr & STM32FX_SPI_SR_BSY),
852 					      10, 100000) < 0) {
853 		dev_warn(spi->dev, "disabling condition timeout\n");
854 	}
855 
856 	if (spi->cur_usedma && spi->dma_tx)
857 		dmaengine_terminate_async(spi->dma_tx);
858 	if (spi->cur_usedma && spi->dma_rx)
859 		dmaengine_terminate_async(spi->dma_rx);
860 
861 	stm32_spi_clr_bits(spi, STM32FX_SPI_CR1, STM32FX_SPI_CR1_SPE);
862 
863 	stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32FX_SPI_CR2_TXDMAEN |
864 						 STM32FX_SPI_CR2_RXDMAEN);
865 
866 	/* Sequence to clear OVR flag */
867 	readl_relaxed(spi->base + STM32FX_SPI_DR);
868 	readl_relaxed(spi->base + STM32FX_SPI_SR);
869 
870 	spin_unlock_irqrestore(&spi->lock, flags);
871 }
872 
/**
 * stm32h7_spi_disable - Disable SPI controller
 * @spi: pointer to the spi controller data structure
 *
 * RX-Fifo is flushed when SPI controller is disabled.
 */
static void stm32h7_spi_disable(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cr1;

	dev_dbg(spi->dev, "disable controller\n");

	spin_lock_irqsave(&spi->lock, flags);

	cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1);

	/* Nothing to do if the controller is already disabled */
	if (!(cr1 & STM32H7_SPI_CR1_SPE)) {
		spin_unlock_irqrestore(&spi->lock, flags);
		return;
	}

	/* Add a delay to make sure that transmission is ended. */
	if (spi->cur_half_period)
		udelay(spi->cur_half_period);

	/* Tear down DMA channels, including the chained MDMA RX channel */
	if (spi->cur_usedma && spi->dma_tx)
		dmaengine_terminate_async(spi->dma_tx);
	if (spi->cur_usedma && spi->dma_rx) {
		dmaengine_terminate_async(spi->dma_rx);
		if (spi->mdma_rx)
			dmaengine_terminate_async(spi->mdma_rx);
	}

	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);

	stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN |
						STM32H7_SPI_CFG1_RXDMAEN);

	/* Disable interrupts and clear status flags */
	writel_relaxed(0, spi->base + STM32H7_SPI_IER);
	writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR);

	spin_unlock_irqrestore(&spi->lock, flags);
}
918 
919 /**
920  * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use
921  * @ctrl: controller interface
922  * @spi_dev: pointer to the spi device
923  * @transfer: pointer to spi transfer
924  *
925  * If driver has fifo and the current transfer size is greater than fifo size,
926  * use DMA. Otherwise use DMA for transfer longer than defined DMA min bytes.
927  */
stm32_spi_can_dma(struct spi_controller * ctrl,struct spi_device * spi_dev,struct spi_transfer * transfer)928 static bool stm32_spi_can_dma(struct spi_controller *ctrl,
929 			      struct spi_device *spi_dev,
930 			      struct spi_transfer *transfer)
931 {
932 	unsigned int dma_size;
933 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
934 
935 	if (spi->cfg->has_fifo)
936 		dma_size = spi->fifo_size;
937 	else
938 		dma_size = SPI_DMA_MIN_BYTES;
939 
940 	dev_dbg(spi->dev, "%s: %s\n", __func__,
941 		(transfer->len > dma_size) ? "true" : "false");
942 
943 	return (transfer->len > dma_size);
944 }
945 
/**
 * stm32fx_spi_irq_event - Interrupt handler for SPI controller events
 * @irq: interrupt line
 * @dev_id: SPI controller ctrl interface
 *
 * Hard IRQ half for STM32F4/F7 PIO transfers: filters the status register
 * down to the events relevant to the current communication mode, services
 * TXE/RXNE, and detects overrun.
 *
 * Return: IRQ_NONE for a spurious event, IRQ_HANDLED while the transfer is
 * still in progress, IRQ_WAKE_THREAD once it is complete (or aborted on
 * overrun) so the threaded handler can finalize it.
 */
static irqreturn_t stm32fx_spi_irq_event(int irq, void *dev_id)
{
	struct spi_controller *ctrl = dev_id;
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	u32 sr, mask = 0;
	bool end = false;

	spin_lock(&spi->lock);

	sr = readl_relaxed(spi->base + STM32FX_SPI_SR);
	/*
	 * BSY flag is not handled in interrupt but it is normal behavior when
	 * this flag is set.
	 */
	sr &= ~STM32FX_SPI_SR_BSY;

	if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX ||
				 spi->cur_comm == SPI_3WIRE_TX)) {
		/* OVR flag shouldn't be handled for TX only mode */
		sr &= ~(STM32FX_SPI_SR_OVR | STM32FX_SPI_SR_RXNE);
		mask |= STM32FX_SPI_SR_TXE;
	}

	if (!spi->cur_usedma && (spi->cur_comm == SPI_FULL_DUPLEX ||
				spi->cur_comm == SPI_SIMPLEX_RX ||
				spi->cur_comm == SPI_3WIRE_RX)) {
		/* TXE flag is set and is handled when RXNE flag occurs */
		sr &= ~STM32FX_SPI_SR_TXE;
		mask |= STM32FX_SPI_SR_RXNE | STM32FX_SPI_SR_OVR;
	}

	/* No event of interest for the current mode: not our interrupt */
	if (!(sr & mask)) {
		dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr);
		spin_unlock(&spi->lock);
		return IRQ_NONE;
	}

	if (sr & STM32FX_SPI_SR_OVR) {
		dev_warn(spi->dev, "Overrun: received value discarded\n");

		/* Sequence to clear OVR flag */
		readl_relaxed(spi->base + STM32FX_SPI_DR);
		readl_relaxed(spi->base + STM32FX_SPI_SR);

		/*
		 * If overrun is detected, it means that something went wrong,
		 * so stop the current transfer. Transfer can wait for next
		 * RXNE but DR is already read and end never happens.
		 */
		end = true;
		goto end_irq;
	}

	if (sr & STM32FX_SPI_SR_TXE) {
		if (spi->tx_buf)
			spi->cfg->write_tx(spi);
		/* tx_len is decremented by write_tx; 0 means all data queued */
		if (spi->tx_len == 0)
			end = true;
	}

	if (sr & STM32FX_SPI_SR_RXNE) {
		spi->cfg->read_rx(spi);
		if (spi->rx_len == 0)
			end = true;
		else if (spi->tx_buf)/* Load data for discontinuous mode */
			spi->cfg->write_tx(spi);
	}

end_irq:
	if (end) {
		/* Immediately disable interrupts to do not generate new one */
		stm32_spi_clr_bits(spi, STM32FX_SPI_CR2,
					STM32FX_SPI_CR2_TXEIE |
					STM32FX_SPI_CR2_RXNEIE |
					STM32FX_SPI_CR2_ERRIE);
		spin_unlock(&spi->lock);
		return IRQ_WAKE_THREAD;
	}

	spin_unlock(&spi->lock);
	return IRQ_HANDLED;
}
1033 
1034 /**
1035  * stm32fx_spi_irq_thread - Thread of interrupt handler for SPI controller
1036  * @irq: interrupt line
1037  * @dev_id: SPI controller interface
1038  */
stm32fx_spi_irq_thread(int irq,void * dev_id)1039 static irqreturn_t stm32fx_spi_irq_thread(int irq, void *dev_id)
1040 {
1041 	struct spi_controller *ctrl = dev_id;
1042 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1043 
1044 	spi_finalize_current_transfer(ctrl);
1045 	stm32fx_spi_disable(spi);
1046 
1047 	return IRQ_HANDLED;
1048 }
1049 
/**
 * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller
 * @irq: interrupt line
 * @dev_id: SPI controller interface
 *
 * Services SUSP/MODF/OVR/EOT/TXP/RXP events for the H7-family controller,
 * moving data through the FIFOs in PIO mode and finalizing the transfer
 * when it ends (normally or on error).
 *
 * Return: IRQ_NONE for a spurious event, IRQ_HANDLED otherwise.
 */
static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id)
{
	struct spi_controller *ctrl = dev_id;
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	u32 sr, ier, mask;
	unsigned long flags;
	bool end = false;

	spin_lock_irqsave(&spi->lock, flags);

	sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
	ier = readl_relaxed(spi->base + STM32H7_SPI_IER);

	/* Only consider events whose interrupt is actually enabled */
	mask = ier;
	/*
	 * EOTIE enables irq from EOT, SUSP and TXC events. We need to set
	 * SUSP to acknowledge it later. TXC is automatically cleared
	 */

	mask |= STM32H7_SPI_SR_SUSP;
	/*
	 * DXPIE is set in Full-Duplex, one IT will be raised if TXP and RXP
	 * are set. So in case of Full-Duplex, need to poll TXP and RXP event.
	 */
	if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma)
		mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP;

	if (!(sr & mask)) {
		dev_vdbg(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n",
			 sr, ier);
		spin_unlock_irqrestore(&spi->lock, flags);
		return IRQ_NONE;
	}

	if (sr & STM32H7_SPI_SR_SUSP) {
		/* Rate-limit the message: SUSP can fire repeatedly in PIO mode */
		static DEFINE_RATELIMIT_STATE(rs,
					      DEFAULT_RATELIMIT_INTERVAL * 10,
					      1);
		ratelimit_set_flags(&rs, RATELIMIT_MSG_ON_RELEASE);
		if (__ratelimit(&rs))
			dev_dbg_ratelimited(spi->dev, "Communication suspended\n");
		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
			stm32h7_spi_read_rxfifo(spi);
		/*
		 * If communication is suspended while using DMA, it means
		 * that something went wrong, so stop the current transfer
		 */
		if (spi->cur_usedma)
			end = true;
	}

	if (sr & STM32H7_SPI_SR_MODF) {
		dev_warn(spi->dev, "Mode fault: transfer aborted\n");
		end = true;
	}

	if (sr & STM32H7_SPI_SR_OVR) {
		dev_err(spi->dev, "Overrun: RX data lost\n");
		end = true;
	}

	if (sr & STM32H7_SPI_SR_EOT) {
		dev_dbg(spi->dev, "End of transfer\n");
		/* Drain whatever is left in the RX FIFO in PIO mode */
		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
			stm32h7_spi_read_rxfifo(spi);
		/*
		 * Finish here unless a DMA RX completion callback is still
		 * expected (plain DMA RX without MDMA chaining).
		 */
		if (!spi->cur_usedma ||
		    (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) ||
		    (spi->mdma_rx && (spi->cur_comm == SPI_SIMPLEX_RX ||
		     spi->cur_comm == SPI_FULL_DUPLEX)))
			end = true;
	}

	if (sr & STM32H7_SPI_SR_TXP)
		if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0)))
			stm32h7_spi_write_txfifo(spi);

	if (sr & STM32H7_SPI_SR_RXP)
		if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0)))
			stm32h7_spi_read_rxfifo(spi);

	/* Acknowledge only the events handled in this pass */
	writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR);

	spin_unlock_irqrestore(&spi->lock, flags);

	if (end) {
		if (spi->cur_usedma && spi->mdma_rx) {
			/* Let MDMA drain the SRAM, its callback will finalize */
			dmaengine_pause(spi->dma_rx);
			/* Wait for callback */
			return IRQ_HANDLED;
		}
		stm32h7_spi_disable(spi);
		spi_finalize_current_transfer(ctrl);
	}

	return IRQ_HANDLED;
}
1151 
stm32_spi_optimize_message(struct spi_message * msg)1152 static int stm32_spi_optimize_message(struct spi_message *msg)
1153 {
1154 	struct spi_controller *ctrl = msg->spi->controller;
1155 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1156 
1157 	/* On STM32H7, messages should not exceed a maximum size set
1158 	 * later via the set_number_of_data function. In order to
1159 	 * ensure that, split large messages into several messages
1160 	 */
1161 	if (spi->cfg->set_number_of_data)
1162 		return spi_split_transfers_maxwords(ctrl, msg, spi->t_size_max);
1163 
1164 	return 0;
1165 }
1166 
1167 /**
1168  * stm32_spi_prepare_msg - set up the controller to transfer a single message
1169  * @ctrl: controller interface
1170  * @msg: pointer to spi message
1171  */
stm32_spi_prepare_msg(struct spi_controller * ctrl,struct spi_message * msg)1172 static int stm32_spi_prepare_msg(struct spi_controller *ctrl,
1173 				 struct spi_message *msg)
1174 {
1175 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
1176 	struct spi_device *spi_dev = msg->spi;
1177 	struct device_node *np = spi_dev->dev.of_node;
1178 	unsigned long flags;
1179 	u32 clrb = 0, setb = 0;
1180 
1181 	/* SPI target device may need time between data frames */
1182 	spi->cur_midi = 0;
1183 	if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi))
1184 		dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi);
1185 
1186 	if (spi_dev->mode & SPI_CPOL)
1187 		setb |= spi->cfg->regs->cpol.mask;
1188 	else
1189 		clrb |= spi->cfg->regs->cpol.mask;
1190 
1191 	if (spi_dev->mode & SPI_CPHA)
1192 		setb |= spi->cfg->regs->cpha.mask;
1193 	else
1194 		clrb |= spi->cfg->regs->cpha.mask;
1195 
1196 	if (spi_dev->mode & SPI_LSB_FIRST)
1197 		setb |= spi->cfg->regs->lsb_first.mask;
1198 	else
1199 		clrb |= spi->cfg->regs->lsb_first.mask;
1200 
1201 	if (STM32_SPI_DEVICE_MODE(spi) && spi_dev->mode & SPI_CS_HIGH)
1202 		setb |= spi->cfg->regs->cs_high.mask;
1203 	else
1204 		clrb |= spi->cfg->regs->cs_high.mask;
1205 
1206 	if (spi_dev->mode & SPI_READY)
1207 		setb |= spi->cfg->regs->rdy_en.mask;
1208 	else
1209 		clrb |= spi->cfg->regs->rdy_en.mask;
1210 
1211 	dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d rdy=%d\n",
1212 		!!(spi_dev->mode & SPI_CPOL),
1213 		!!(spi_dev->mode & SPI_CPHA),
1214 		!!(spi_dev->mode & SPI_LSB_FIRST),
1215 		!!(spi_dev->mode & SPI_CS_HIGH),
1216 		!!(spi_dev->mode & SPI_READY));
1217 
1218 	spin_lock_irqsave(&spi->lock, flags);
1219 
1220 	/* CPOL, CPHA, LSB FIRST, CS_HIGH and RDY_EN bits have common register */
1221 	if (clrb || setb)
1222 		writel_relaxed(
1223 			(readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) &
1224 			 ~clrb) | setb,
1225 			spi->base + spi->cfg->regs->cpol.reg);
1226 
1227 	spin_unlock_irqrestore(&spi->lock, flags);
1228 
1229 	return 0;
1230 }
1231 
1232 /**
1233  * stm32fx_spi_dma_tx_cb - dma callback
1234  * @data: pointer to the spi controller data structure
1235  *
1236  * DMA callback is called when the transfer is complete for DMA TX channel.
1237  */
stm32fx_spi_dma_tx_cb(void * data)1238 static void stm32fx_spi_dma_tx_cb(void *data)
1239 {
1240 	struct stm32_spi *spi = data;
1241 
1242 	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
1243 		spi_finalize_current_transfer(spi->ctrl);
1244 		stm32fx_spi_disable(spi);
1245 	}
1246 }
1247 
1248 /**
1249  * stm32_spi_dma_rx_cb - dma callback
1250  * @data: pointer to the spi controller data structure
1251  *
1252  * DMA callback is called when the transfer is complete for DMA RX channel.
1253  */
stm32_spi_dma_rx_cb(void * data)1254 static void stm32_spi_dma_rx_cb(void *data)
1255 {
1256 	struct stm32_spi *spi = data;
1257 
1258 	spi_finalize_current_transfer(spi->ctrl);
1259 	spi->cfg->disable(spi);
1260 }
1261 
1262 /**
1263  * stm32_spi_dma_config - configure dma slave channel depending on current
1264  *			  transfer bits_per_word.
1265  * @spi: pointer to the spi controller data structure
1266  * @dma_chan: pointer to the DMA channel
1267  * @dma_conf: pointer to the dma_slave_config structure
1268  * @dir: direction of the dma transfer
1269  */
stm32_spi_dma_config(struct stm32_spi * spi,struct dma_chan * dma_chan,struct dma_slave_config * dma_conf,enum dma_transfer_direction dir)1270 static void stm32_spi_dma_config(struct stm32_spi *spi,
1271 				 struct dma_chan *dma_chan,
1272 				 struct dma_slave_config *dma_conf,
1273 				 enum dma_transfer_direction dir)
1274 {
1275 	enum dma_slave_buswidth buswidth;
1276 	struct dma_slave_caps caps;
1277 	u32 maxburst = 1;
1278 	int ret;
1279 
1280 	if (spi->cur_bpw <= 8)
1281 		buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE;
1282 	else if (spi->cur_bpw <= 16)
1283 		buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES;
1284 	else
1285 		buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES;
1286 
1287 	/* Valid for DMA Half or Full Fifo threshold */
1288 	if (!spi->cfg->prevent_dma_burst && spi->cfg->has_fifo && spi->cur_fthlv != 2)
1289 		maxburst = spi->cur_fthlv;
1290 
1291 	/* Get the DMA channel caps, and adjust maxburst if possible */
1292 	ret = dma_get_slave_caps(dma_chan, &caps);
1293 	if (!ret)
1294 		maxburst = min(maxburst, caps.max_burst);
1295 
1296 	memset(dma_conf, 0, sizeof(struct dma_slave_config));
1297 	dma_conf->direction = dir;
1298 	if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */
1299 		dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg;
1300 		dma_conf->src_addr_width = buswidth;
1301 		dma_conf->src_maxburst = maxburst;
1302 
1303 		dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n",
1304 			buswidth, maxburst);
1305 	} else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */
1306 		dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg;
1307 		dma_conf->dst_addr_width = buswidth;
1308 		dma_conf->dst_maxburst = maxburst;
1309 
1310 		dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n",
1311 			buswidth, maxburst);
1312 	}
1313 }
1314 
1315 /**
1316  * stm32fx_spi_transfer_one_irq - transfer a single spi_transfer using
1317  *				  interrupts
1318  * @spi: pointer to the spi controller data structure
1319  *
1320  * It must returns 0 if the transfer is finished or 1 if the transfer is still
1321  * in progress.
1322  */
stm32fx_spi_transfer_one_irq(struct stm32_spi * spi)1323 static int stm32fx_spi_transfer_one_irq(struct stm32_spi *spi)
1324 {
1325 	unsigned long flags;
1326 	u32 cr2 = 0;
1327 
1328 	/* Enable the interrupts relative to the current communication mode */
1329 	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) {
1330 		cr2 |= STM32FX_SPI_CR2_TXEIE;
1331 	} else if (spi->cur_comm == SPI_FULL_DUPLEX ||
1332 				spi->cur_comm == SPI_SIMPLEX_RX ||
1333 				spi->cur_comm == SPI_3WIRE_RX) {
1334 		/* In transmit-only mode, the OVR flag is set in the SR register
1335 		 * since the received data are never read. Therefore set OVR
1336 		 * interrupt only when rx buffer is available.
1337 		 */
1338 		cr2 |= STM32FX_SPI_CR2_RXNEIE | STM32FX_SPI_CR2_ERRIE;
1339 	} else {
1340 		return -EINVAL;
1341 	}
1342 
1343 	spin_lock_irqsave(&spi->lock, flags);
1344 
1345 	stm32_spi_set_bits(spi, STM32FX_SPI_CR2, cr2);
1346 
1347 	stm32_spi_enable(spi);
1348 
1349 	/* starting data transfer when buffer is loaded */
1350 	if (spi->tx_buf)
1351 		spi->cfg->write_tx(spi);
1352 
1353 	spin_unlock_irqrestore(&spi->lock, flags);
1354 
1355 	return 1;
1356 }
1357 
1358 /**
1359  * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using
1360  *				  interrupts
1361  * @spi: pointer to the spi controller data structure
1362  *
1363  * It must returns 0 if the transfer is finished or 1 if the transfer is still
1364  * in progress.
1365  */
stm32h7_spi_transfer_one_irq(struct stm32_spi * spi)1366 static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi)
1367 {
1368 	unsigned long flags;
1369 	u32 ier = 0;
1370 
1371 	/* Enable the interrupts relative to the current communication mode */
1372 	if (spi->tx_buf && spi->rx_buf)	/* Full Duplex */
1373 		ier |= STM32H7_SPI_IER_DXPIE;
1374 	else if (spi->tx_buf)		/* Half-Duplex TX dir or Simplex TX */
1375 		ier |= STM32H7_SPI_IER_TXPIE;
1376 	else if (spi->rx_buf)		/* Half-Duplex RX dir or Simplex RX */
1377 		ier |= STM32H7_SPI_IER_RXPIE;
1378 
1379 	/* Enable the interrupts relative to the end of transfer */
1380 	ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE |
1381 	       STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
1382 
1383 	spin_lock_irqsave(&spi->lock, flags);
1384 
1385 	stm32_spi_enable(spi);
1386 
1387 	/* Be sure to have data in fifo before starting data transfer */
1388 	if (spi->tx_buf)
1389 		stm32h7_spi_write_txfifo(spi);
1390 
1391 	if (STM32_SPI_HOST_MODE(spi))
1392 		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
1393 
1394 	writel_relaxed(ier, spi->base + STM32H7_SPI_IER);
1395 
1396 	spin_unlock_irqrestore(&spi->lock, flags);
1397 
1398 	return 1;
1399 }
1400 
1401 /**
1402  * stm32fx_spi_transfer_one_dma_start - Set SPI driver registers to start
1403  *					transfer using DMA
1404  * @spi: pointer to the spi controller data structure
1405  */
stm32fx_spi_transfer_one_dma_start(struct stm32_spi * spi)1406 static void stm32fx_spi_transfer_one_dma_start(struct stm32_spi *spi)
1407 {
1408 	/* In DMA mode end of transfer is handled by DMA TX or RX callback. */
1409 	if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX ||
1410 	    spi->cur_comm == SPI_FULL_DUPLEX) {
1411 		/*
1412 		 * In transmit-only mode, the OVR flag is set in the SR register
1413 		 * since the received data are never read. Therefore set OVR
1414 		 * interrupt only when rx buffer is available.
1415 		 */
1416 		stm32_spi_set_bits(spi, STM32FX_SPI_CR2, STM32FX_SPI_CR2_ERRIE);
1417 	}
1418 
1419 	stm32_spi_enable(spi);
1420 }
1421 
1422 /**
1423  * stm32f7_spi_transfer_one_dma_start - Set SPI driver registers to start
1424  *					transfer using DMA
1425  * @spi: pointer to the spi controller data structure
1426  */
stm32f7_spi_transfer_one_dma_start(struct stm32_spi * spi)1427 static void stm32f7_spi_transfer_one_dma_start(struct stm32_spi *spi)
1428 {
1429 	/* Configure DMA request trigger threshold according to DMA width */
1430 	if (spi->cur_bpw <= 8)
1431 		stm32_spi_set_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH);
1432 	else
1433 		stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH);
1434 
1435 	stm32fx_spi_transfer_one_dma_start(spi);
1436 }
1437 
1438 /**
1439  * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start
1440  *					transfer using DMA
1441  * @spi: pointer to the spi controller data structure
1442  */
stm32h7_spi_transfer_one_dma_start(struct stm32_spi * spi)1443 static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi)
1444 {
1445 	uint32_t ier = STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE;
1446 
1447 	/* Enable the interrupts */
1448 	if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX)
1449 		ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE;
1450 	if (spi->mdma_rx && (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_FULL_DUPLEX))
1451 		ier |= STM32H7_SPI_IER_EOTIE;
1452 
1453 	stm32_spi_set_bits(spi, STM32H7_SPI_IER, ier);
1454 
1455 	stm32_spi_enable(spi);
1456 
1457 	if (STM32_SPI_HOST_MODE(spi))
1458 		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
1459 }
1460 
1461 /**
1462  * stm32_spi_prepare_rx_dma_mdma_chaining - Prepare RX DMA and MDMA chaining
1463  * @spi: pointer to the spi controller data structure
1464  * @xfer: pointer to the spi transfer
1465  * @rx_dma_conf: pointer to the DMA configuration for RX channel
1466  * @rx_dma_desc: pointer to the RX DMA descriptor
1467  * @rx_mdma_desc: pointer to the RX MDMA descriptor
1468  *
1469  * It must return 0 if the chaining is possible or an error code if not.
1470  */
stm32_spi_prepare_rx_dma_mdma_chaining(struct stm32_spi * spi,struct spi_transfer * xfer,struct dma_slave_config * rx_dma_conf,struct dma_async_tx_descriptor ** rx_dma_desc,struct dma_async_tx_descriptor ** rx_mdma_desc)1471 static int stm32_spi_prepare_rx_dma_mdma_chaining(struct stm32_spi *spi,
1472 						  struct spi_transfer *xfer,
1473 						  struct dma_slave_config *rx_dma_conf,
1474 						  struct dma_async_tx_descriptor **rx_dma_desc,
1475 						  struct dma_async_tx_descriptor **rx_mdma_desc)
1476 {
1477 	struct dma_async_tx_descriptor *_mdma_desc = *rx_mdma_desc;
1478 	struct dma_async_tx_descriptor *_dma_desc = *rx_dma_desc;
1479 	struct dma_slave_config rx_mdma_conf = {0};
1480 	u32 sram_period, nents = 0, spi_s_len;
1481 	struct sg_table dma_sgt, mdma_sgt;
1482 	struct scatterlist *spi_s, *s;
1483 	dma_addr_t dma_buf;
1484 	int i, ret;
1485 
1486 	sram_period = spi->sram_rx_buf_size / 2;
1487 
1488 	/* Configure MDMA RX channel */
1489 	rx_mdma_conf.direction = rx_dma_conf->direction;
1490 	rx_mdma_conf.src_addr = spi->sram_dma_rx_buf;
1491 	rx_mdma_conf.peripheral_config = rx_dma_conf->peripheral_config;
1492 	rx_mdma_conf.peripheral_size = rx_dma_conf->peripheral_size;
1493 	dmaengine_slave_config(spi->mdma_rx, &rx_mdma_conf);
1494 
1495 	/* Count the number of entries needed */
1496 	for_each_sg(xfer->rx_sg.sgl, spi_s, xfer->rx_sg.nents, i)
1497 		if (sg_dma_len(spi_s) > sram_period)
1498 			nents += DIV_ROUND_UP(sg_dma_len(spi_s), sram_period);
1499 		else
1500 			nents++;
1501 
1502 	/* Prepare DMA slave_sg DBM transfer DEV_TO_MEM (RX>MEM=SRAM) */
1503 	ret = sg_alloc_table(&dma_sgt, nents, GFP_ATOMIC);
1504 	if (ret)
1505 		return ret;
1506 
1507 	spi_s = xfer->rx_sg.sgl;
1508 	spi_s_len = sg_dma_len(spi_s);
1509 	dma_buf = spi->sram_dma_rx_buf;
1510 	for_each_sg(dma_sgt.sgl, s, dma_sgt.nents, i) {
1511 		size_t bytes = min_t(size_t, spi_s_len, sram_period);
1512 
1513 		sg_dma_len(s) = bytes;
1514 		sg_dma_address(s) = dma_buf;
1515 		spi_s_len -= bytes;
1516 
1517 		if (!spi_s_len && sg_next(spi_s)) {
1518 			spi_s = sg_next(spi_s);
1519 			spi_s_len = sg_dma_len(spi_s);
1520 			dma_buf = spi->sram_dma_rx_buf;
1521 		} else { /* DMA configured in DBM: it will swap between the SRAM periods */
1522 			if (i & 1)
1523 				dma_buf += sram_period;
1524 			else
1525 				dma_buf = spi->sram_dma_rx_buf;
1526 		}
1527 	}
1528 
1529 	_dma_desc = dmaengine_prep_slave_sg(spi->dma_rx, dma_sgt.sgl,
1530 					    dma_sgt.nents, rx_dma_conf->direction,
1531 					    DMA_PREP_INTERRUPT);
1532 	sg_free_table(&dma_sgt);
1533 
1534 	if (!_dma_desc)
1535 		return -EINVAL;
1536 
1537 	/* Prepare MDMA slave_sg transfer MEM_TO_MEM (SRAM>DDR) */
1538 	ret = sg_alloc_table(&mdma_sgt, nents, GFP_ATOMIC);
1539 	if (ret) {
1540 		_dma_desc = NULL;
1541 		return ret;
1542 	}
1543 
1544 	spi_s = xfer->rx_sg.sgl;
1545 	spi_s_len = sg_dma_len(spi_s);
1546 	dma_buf = sg_dma_address(spi_s);
1547 	for_each_sg(mdma_sgt.sgl, s, mdma_sgt.nents, i) {
1548 		size_t bytes = min_t(size_t, spi_s_len, sram_period);
1549 
1550 		sg_dma_len(s) = bytes;
1551 		sg_dma_address(s) = dma_buf;
1552 		spi_s_len -= bytes;
1553 
1554 		if (!spi_s_len && sg_next(spi_s)) {
1555 			spi_s = sg_next(spi_s);
1556 			spi_s_len = sg_dma_len(spi_s);
1557 			dma_buf = sg_dma_address(spi_s);
1558 		} else {
1559 			dma_buf += bytes;
1560 		}
1561 	}
1562 
1563 	_mdma_desc = dmaengine_prep_slave_sg(spi->mdma_rx, mdma_sgt.sgl,
1564 					     mdma_sgt.nents, rx_mdma_conf.direction,
1565 					     DMA_PREP_INTERRUPT);
1566 	sg_free_table(&mdma_sgt);
1567 
1568 	if (!_mdma_desc) {
1569 		_dma_desc = NULL;
1570 		return -EINVAL;
1571 	}
1572 
1573 	return 0;
1574 }
1575 
/**
 * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
 * @spi: pointer to the spi controller data structure
 * @xfer: pointer to the spi_transfer structure
 *
 * Prepares and submits RX (optionally chained with MDMA) and TX descriptors,
 * enables the DMA requests, then starts the transfer via the variant hook.
 * On any descriptor or submit failure it tears down and falls back to the
 * interrupt-driven path.
 *
 * It must returns 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32_spi_transfer_one_dma(struct stm32_spi *spi,
				      struct spi_transfer *xfer)
{
	struct dma_async_tx_descriptor *rx_mdma_desc = NULL, *rx_dma_desc = NULL;
	struct dma_async_tx_descriptor *tx_dma_desc = NULL;
	struct dma_slave_config tx_dma_conf, rx_dma_conf;
	unsigned long flags;
	int ret = 0;

	spin_lock_irqsave(&spi->lock, flags);

	if (spi->rx_buf && spi->dma_rx) {
		stm32_spi_dma_config(spi, spi->dma_rx, &rx_dma_conf, DMA_DEV_TO_MEM);
		if (spi->mdma_rx) {
			/* Try DMA->SRAM->MDMA->DDR chaining first */
			rx_dma_conf.peripheral_size = 1;
			dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);

			ret = stm32_spi_prepare_rx_dma_mdma_chaining(spi, xfer, &rx_dma_conf,
								     &rx_dma_desc, &rx_mdma_desc);
			if (ret) { /* RX DMA MDMA chaining not possible, fallback to DMA only */
				rx_dma_conf.peripheral_config = 0;
				rx_dma_desc = NULL;
			}
		}
		if (!rx_dma_desc) {
			/* Plain RX DMA straight into the client buffers */
			dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);
			rx_dma_desc = dmaengine_prep_slave_sg(spi->dma_rx, xfer->rx_sg.sgl,
							      xfer->rx_sg.nents,
							      rx_dma_conf.direction,
							      DMA_PREP_INTERRUPT);
		}
	}

	if (spi->tx_buf && spi->dma_tx) {
		stm32_spi_dma_config(spi, spi->dma_tx, &tx_dma_conf, DMA_MEM_TO_DEV);
		dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);
		tx_dma_desc = dmaengine_prep_slave_sg(spi->dma_tx, xfer->tx_sg.sgl,
						      xfer->tx_sg.nents,
						      tx_dma_conf.direction,
						      DMA_PREP_INTERRUPT);
	}

	/* A needed descriptor failed to prepare: abandon the DMA path */
	if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||
	    (spi->rx_buf && spi->dma_rx && !rx_dma_desc))
		goto dma_desc_error;

	if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))
		goto dma_desc_error;

	if (rx_dma_desc) {
		/* Completion is signalled by the last channel in the chain */
		if (rx_mdma_desc) {
			rx_mdma_desc->callback = spi->cfg->dma_rx_cb;
			rx_mdma_desc->callback_param = spi;
		} else {
			rx_dma_desc->callback = spi->cfg->dma_rx_cb;
			rx_dma_desc->callback_param = spi;
		}

		/* Enable Rx DMA request */
		stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg,
				   spi->cfg->regs->dma_rx_en.mask);
		if (rx_mdma_desc) {
			if (dma_submit_error(dmaengine_submit(rx_mdma_desc))) {
				dev_err(spi->dev, "Rx MDMA submit failed\n");
				goto dma_desc_error;
			}
			/* Enable Rx MDMA channel */
			dma_async_issue_pending(spi->mdma_rx);
		}
		if (dma_submit_error(dmaengine_submit(rx_dma_desc))) {
			dev_err(spi->dev, "Rx DMA submit failed\n");
			goto dma_desc_error;
		}
		/* Enable Rx DMA channel */
		dma_async_issue_pending(spi->dma_rx);
	}

	if (tx_dma_desc) {
		/* TX-only transfers complete from the TX callback */
		if (spi->cur_comm == SPI_SIMPLEX_TX ||
		    spi->cur_comm == SPI_3WIRE_TX) {
			tx_dma_desc->callback = spi->cfg->dma_tx_cb;
			tx_dma_desc->callback_param = spi;
		}

		if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {
			dev_err(spi->dev, "Tx DMA submit failed\n");
			goto dma_submit_error;
		}
		/* Enable Tx DMA channel */
		dma_async_issue_pending(spi->dma_tx);

		/* Enable Tx DMA request */
		stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg,
				   spi->cfg->regs->dma_tx_en.mask);
	}

	spi->cfg->transfer_one_dma_start(spi);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 1;

dma_submit_error:
	/* RX side was already submitted: stop it before falling back */
	if (spi->mdma_rx)
		dmaengine_terminate_sync(spi->mdma_rx);
	if (spi->dma_rx)
		dmaengine_terminate_sync(spi->dma_rx);

dma_desc_error:
	stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg,
			   spi->cfg->regs->dma_rx_en.mask);

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_info(spi->dev, "DMA issue: fall back to irq transfer\n");

	/* Clear the bounce buffer so stale data cannot leak into the fallback */
	if (spi->sram_rx_buf)
		memset(spi->sram_rx_buf, 0, spi->sram_rx_buf_size);

	spi->cur_usedma = false;
	return spi->cfg->transfer_one_irq(spi);
}
1706 
1707 /**
1708  * stm32f4_spi_set_bpw - Configure bits per word
1709  * @spi: pointer to the spi controller data structure
1710  */
stm32f4_spi_set_bpw(struct stm32_spi * spi)1711 static void stm32f4_spi_set_bpw(struct stm32_spi *spi)
1712 {
1713 	if (spi->cur_bpw == 16)
1714 		stm32_spi_set_bits(spi, STM32FX_SPI_CR1, STM32F4_SPI_CR1_DFF);
1715 	else
1716 		stm32_spi_clr_bits(spi, STM32FX_SPI_CR1, STM32F4_SPI_CR1_DFF);
1717 }
1718 
1719 /**
1720  * stm32f7_spi_set_bpw - Configure bits per word
1721  * @spi: pointer to the spi controller data structure
1722  */
stm32f7_spi_set_bpw(struct stm32_spi * spi)1723 static void stm32f7_spi_set_bpw(struct stm32_spi *spi)
1724 {
1725 	u32 bpw;
1726 	u32 cr2_clrb = 0, cr2_setb = 0;
1727 
1728 	bpw = spi->cur_bpw - 1;
1729 
1730 	cr2_clrb |= STM32F7_SPI_CR2_DS;
1731 	cr2_setb |= FIELD_PREP(STM32F7_SPI_CR2_DS, bpw);
1732 
1733 	if (spi->rx_len >= sizeof(u16))
1734 		cr2_clrb |= STM32F7_SPI_CR2_FRXTH;
1735 	else
1736 		cr2_setb |= STM32F7_SPI_CR2_FRXTH;
1737 
1738 	writel_relaxed(
1739 		(readl_relaxed(spi->base + STM32FX_SPI_CR2) &
1740 		 ~cr2_clrb) | cr2_setb,
1741 		spi->base + STM32FX_SPI_CR2);
1742 }
1743 
1744 /**
1745  * stm32h7_spi_set_bpw - configure bits per word
1746  * @spi: pointer to the spi controller data structure
1747  */
stm32h7_spi_set_bpw(struct stm32_spi * spi)1748 static void stm32h7_spi_set_bpw(struct stm32_spi *spi)
1749 {
1750 	u32 bpw, fthlv;
1751 	u32 cfg1_clrb = 0, cfg1_setb = 0;
1752 
1753 	bpw = spi->cur_bpw - 1;
1754 
1755 	cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE;
1756 	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_DSIZE, bpw);
1757 
1758 	spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen);
1759 	fthlv = spi->cur_fthlv - 1;
1760 
1761 	cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV;
1762 	cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_FTHLV, fthlv);
1763 
1764 	writel_relaxed(
1765 		(readl_relaxed(spi->base + STM32H7_SPI_CFG1) &
1766 		 ~cfg1_clrb) | cfg1_setb,
1767 		spi->base + STM32H7_SPI_CFG1);
1768 }
1769 
1770 /**
1771  * stm32_spi_set_mbr - Configure baud rate divisor in host mode
1772  * @spi: pointer to the spi controller data structure
1773  * @mbrdiv: baud rate divisor value
1774  */
stm32_spi_set_mbr(struct stm32_spi * spi,u32 mbrdiv)1775 static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv)
1776 {
1777 	u32 clrb = 0, setb = 0;
1778 
1779 	clrb |= spi->cfg->regs->br.mask;
1780 	setb |= (mbrdiv << spi->cfg->regs->br.shift) & spi->cfg->regs->br.mask;
1781 
1782 	writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) &
1783 			~clrb) | setb,
1784 		       spi->base + spi->cfg->regs->br.reg);
1785 }
1786 
1787 /**
1788  * stm32_spi_communication_type - return transfer communication type
1789  * @spi_dev: pointer to the spi device
1790  * @transfer: pointer to spi transfer
1791  */
stm32_spi_communication_type(struct spi_device * spi_dev,struct spi_transfer * transfer)1792 static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev,
1793 						 struct spi_transfer *transfer)
1794 {
1795 	unsigned int type = SPI_FULL_DUPLEX;
1796 
1797 	if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */
1798 		/*
1799 		 * SPI_3WIRE and xfer->tx_buf != NULL and xfer->rx_buf != NULL
1800 		 * is forbidden and unvalidated by SPI subsystem so depending
1801 		 * on the valid buffer, we can determine the direction of the
1802 		 * transfer.
1803 		 */
1804 		if (!transfer->tx_buf)
1805 			type = SPI_3WIRE_RX;
1806 		else
1807 			type = SPI_3WIRE_TX;
1808 	} else {
1809 		if (!transfer->tx_buf)
1810 			type = SPI_SIMPLEX_RX;
1811 		else if (!transfer->rx_buf)
1812 			type = SPI_SIMPLEX_TX;
1813 	}
1814 
1815 	return type;
1816 }
1817 
1818 /**
1819  * stm32fx_spi_set_mode - configure communication mode
1820  * @spi: pointer to the spi controller data structure
1821  * @comm_type: type of communication to configure
1822  */
stm32fx_spi_set_mode(struct stm32_spi * spi,unsigned int comm_type)1823 static int stm32fx_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
1824 {
1825 	if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) {
1826 		stm32_spi_set_bits(spi, STM32FX_SPI_CR1,
1827 					STM32FX_SPI_CR1_BIDIMODE |
1828 					STM32FX_SPI_CR1_BIDIOE);
1829 	} else if (comm_type == SPI_FULL_DUPLEX ||
1830 				comm_type == SPI_SIMPLEX_RX) {
1831 		stm32_spi_clr_bits(spi, STM32FX_SPI_CR1,
1832 					STM32FX_SPI_CR1_BIDIMODE |
1833 					STM32FX_SPI_CR1_BIDIOE);
1834 	} else if (comm_type == SPI_3WIRE_RX) {
1835 		stm32_spi_set_bits(spi, STM32FX_SPI_CR1,
1836 					STM32FX_SPI_CR1_BIDIMODE);
1837 		stm32_spi_clr_bits(spi, STM32FX_SPI_CR1,
1838 					STM32FX_SPI_CR1_BIDIOE);
1839 	} else {
1840 		return -EINVAL;
1841 	}
1842 
1843 	return 0;
1844 }
1845 
1846 /**
1847  * stm32h7_spi_set_mode - configure communication mode
1848  * @spi: pointer to the spi controller data structure
1849  * @comm_type: type of communication to configure
1850  */
stm32h7_spi_set_mode(struct stm32_spi * spi,unsigned int comm_type)1851 static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type)
1852 {
1853 	u32 mode;
1854 	u32 cfg2_clrb = 0, cfg2_setb = 0;
1855 
1856 	if (comm_type == SPI_3WIRE_RX) {
1857 		mode = STM32H7_SPI_HALF_DUPLEX;
1858 		stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
1859 	} else if (comm_type == SPI_3WIRE_TX) {
1860 		mode = STM32H7_SPI_HALF_DUPLEX;
1861 		stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR);
1862 	} else if (comm_type == SPI_SIMPLEX_RX) {
1863 		mode = STM32H7_SPI_SIMPLEX_RX;
1864 	} else if (comm_type == SPI_SIMPLEX_TX) {
1865 		mode = STM32H7_SPI_SIMPLEX_TX;
1866 	} else {
1867 		mode = STM32H7_SPI_FULL_DUPLEX;
1868 	}
1869 
1870 	cfg2_clrb |= STM32H7_SPI_CFG2_COMM;
1871 	cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_COMM, mode);
1872 
1873 	writel_relaxed(
1874 		(readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
1875 		 ~cfg2_clrb) | cfg2_setb,
1876 		spi->base + STM32H7_SPI_CFG2);
1877 
1878 	return 0;
1879 }
1880 
/**
 * stm32h7_spi_data_idleness - configure minimum time delay inserted between two
 *			       consecutive data frames in host mode
 * @spi: pointer to the spi controller data structure
 * @xfer: pointer to spi transfer
 *
 * The inter-frame delay source is either the per-transfer word_delay or the
 * deprecated st,spi-midi-ns DT property (kept in spi->cur_midi); word_delay
 * takes precedence when both are set. The delay is converted to a number of
 * SCK periods and clamped to the CFG2 MIDI field width.
 */
static void stm32h7_spi_data_idleness(struct stm32_spi *spi, struct spi_transfer *xfer)
{
	u32 cfg2_clrb = 0, cfg2_setb = 0;
	u32 len = xfer->len;
	u32 spi_delay_ns;

	spi_delay_ns = spi_delay_to_ns(&xfer->word_delay, xfer);

	if (spi->cur_midi != 0) {
		/* Legacy DT property in use: warn, and let word_delay win */
		dev_warn(spi->dev, "st,spi-midi-ns DT property is deprecated\n");
		if (spi_delay_ns) {
			dev_warn(spi->dev, "Overriding st,spi-midi-ns with word_delay_ns %d\n",
				 spi_delay_ns);
			spi->cur_midi = spi_delay_ns;
		}
	} else {
		spi->cur_midi = spi_delay_ns;
	}

	/* MIDI is always cleared; only re-set when a delay is meaningful */
	cfg2_clrb |= STM32H7_SPI_CFG2_MIDI;
	if ((len > 1) && (spi->cur_midi > 0)) {
		/* Round up so the requested delay is a lower bound */
		u32 sck_period_ns = DIV_ROUND_UP(NSEC_PER_SEC, spi->cur_speed);
		u32 midi = min_t(u32,
				 DIV_ROUND_UP(spi->cur_midi, sck_period_ns),
				 FIELD_GET(STM32H7_SPI_CFG2_MIDI,
				 STM32H7_SPI_CFG2_MIDI));


		dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n",
			sck_period_ns, midi, midi * sck_period_ns);
		cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_MIDI, midi);
	}

	writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) &
			~cfg2_clrb) | cfg2_setb,
		       spi->base + STM32H7_SPI_CFG2);
}
1924 
1925 /**
1926  * stm32h7_spi_number_of_data - configure number of data at current transfer
1927  * @spi: pointer to the spi controller data structure
1928  * @nb_words: transfer length (in words)
1929  */
stm32h7_spi_number_of_data(struct stm32_spi * spi,u32 nb_words)1930 static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words)
1931 {
1932 	if (nb_words <= spi->t_size_max) {
1933 		writel_relaxed(FIELD_PREP(STM32H7_SPI_CR2_TSIZE, nb_words),
1934 			       spi->base + STM32H7_SPI_CR2);
1935 	} else {
1936 		return -EMSGSIZE;
1937 	}
1938 
1939 	return 0;
1940 }
1941 
/**
 * stm32_spi_transfer_one_setup - common setup to transfer a single
 *				  spi_transfer either using DMA or
 *				  interrupts.
 * @spi: pointer to the spi controller data structure
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 *
 * Runs with spi->lock held and IRQs disabled: programs bits-per-word,
 * baud rate (host mode only), communication mode, inter-data idleness and
 * transfer size for @transfer. Returns 0 on success or a negative errno.
 */
static int stm32_spi_transfer_one_setup(struct stm32_spi *spi,
					struct spi_device *spi_dev,
					struct spi_transfer *transfer)
{
	unsigned long flags;
	unsigned int comm_type;
	int nb_words, ret = 0;
	int mbr;

	spin_lock_irqsave(&spi->lock, flags);

	spi->cur_xferlen = transfer->len;

	spi->cur_bpw = transfer->bits_per_word;
	spi->cfg->set_bpw(spi);

	/* RDY flow control cannot be used with frames narrower than 8 bits */
	if (spi_dev->mode & SPI_READY && spi->cur_bpw < 8) {
		writel_relaxed(readl_relaxed(spi->base + spi->cfg->regs->rdy_en.reg) &
				~spi->cfg->regs->rdy_en.mask,
					spi->base + spi->cfg->regs->rdy_en.reg);
		dev_dbg(spi->dev, "RDY logic disabled as bits per word < 8\n");
	}

	/* Update spi->cur_speed with real clock speed */
	if (STM32_SPI_HOST_MODE(spi)) {
		mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz,
					    spi->cfg->baud_rate_div_min,
					    spi->cfg->baud_rate_div_max);
		if (mbr < 0) {
			ret = mbr;
			goto out;
		}

		/* Report the achieved speed back to the core */
		transfer->speed_hz = spi->cur_speed;
		stm32_spi_set_mbr(spi, mbr);
	}

	comm_type = stm32_spi_communication_type(spi_dev, transfer);
	ret = spi->cfg->set_mode(spi, comm_type);
	if (ret < 0)
		goto out;

	spi->cur_comm = comm_type;

	/* Inter-data idleness is only relevant in host mode (H7 and later) */
	if (STM32_SPI_HOST_MODE(spi) && spi->cfg->set_data_idleness)
		spi->cfg->set_data_idleness(spi, transfer);

	/* Convert the byte length into a number of data frames */
	if (spi->cur_bpw <= 8)
		nb_words = transfer->len;
	else if (spi->cur_bpw <= 16)
		nb_words = DIV_ROUND_UP(transfer->len * 8, 16);
	else
		nb_words = DIV_ROUND_UP(transfer->len * 8, 32);

	if (spi->cfg->set_number_of_data) {
		ret = spi->cfg->set_number_of_data(spi, nb_words);
		if (ret < 0)
			goto out;
	}

	dev_dbg(spi->dev, "transfer communication mode set to %d\n",
		spi->cur_comm);
	dev_dbg(spi->dev,
		"data frame of %d-bit, data packet of %d data frames\n",
		spi->cur_bpw, spi->cur_fthlv);
	if (STM32_SPI_HOST_MODE(spi))
		dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed);
	dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n",
		spi->cur_xferlen, nb_words);
	dev_dbg(spi->dev, "dma %s\n",
		(spi->cur_usedma) ? "enabled" : "disabled");

out:
	spin_unlock_irqrestore(&spi->lock, flags);

	return ret;
}
2027 
2028 /**
2029  * stm32_spi_transfer_one - transfer a single spi_transfer
2030  * @ctrl: controller interface
2031  * @spi_dev: pointer to the spi device
2032  * @transfer: pointer to spi transfer
2033  *
2034  * It must return 0 if the transfer is finished or 1 if the transfer is still
2035  * in progress.
2036  */
stm32_spi_transfer_one(struct spi_controller * ctrl,struct spi_device * spi_dev,struct spi_transfer * transfer)2037 static int stm32_spi_transfer_one(struct spi_controller *ctrl,
2038 				  struct spi_device *spi_dev,
2039 				  struct spi_transfer *transfer)
2040 {
2041 	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
2042 	int ret;
2043 
2044 	spi->tx_buf = transfer->tx_buf;
2045 	spi->rx_buf = transfer->rx_buf;
2046 	spi->tx_len = spi->tx_buf ? transfer->len : 0;
2047 	spi->rx_len = spi->rx_buf ? transfer->len : 0;
2048 
2049 	spi->cur_usedma = (ctrl->can_dma &&
2050 			   ctrl->can_dma(ctrl, spi_dev, transfer));
2051 
2052 	ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer);
2053 	if (ret) {
2054 		dev_err(spi->dev, "SPI transfer setup failed\n");
2055 		return ret;
2056 	}
2057 
2058 	if (spi->cur_usedma)
2059 		return stm32_spi_transfer_one_dma(spi, transfer);
2060 	else
2061 		return spi->cfg->transfer_one_irq(spi);
2062 }
2063 
/**
 * stm32_spi_unprepare_msg - relax the hardware
 * @ctrl: controller interface
 * @msg: pointer to the spi message
 *
 * Disables the controller and wipes the SRAM RX bounce buffer (if one was
 * allocated) so stale data cannot leak into the next message. Always
 * returns 0.
 */
static int stm32_spi_unprepare_msg(struct spi_controller *ctrl,
				   struct spi_message *msg)
{
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	spi->cfg->disable(spi);

	if (spi->sram_rx_buf)
		memset(spi->sram_rx_buf, 0, spi->sram_rx_buf_size);

	return 0;
}
2081 
/**
 * stm32fx_spi_config - Configure SPI controller as SPI host
 * @spi: pointer to the spi controller data structure
 *
 * One-time controller setup for the F4/F7 variants, done under spi->lock.
 * Always returns 0.
 */
static int stm32fx_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32FX_SPI_I2SCFGR,
			   STM32FX_SPI_I2SCFGR_I2SMOD);

	/*
	 * - SS input value high
	 * - transmitter half duplex direction
	 * - Set the host mode (default Motorola mode)
	 * - Consider 1 host/n targets configuration and
	 *   SS input value is determined by the SSI bit
	 */
	stm32_spi_set_bits(spi, STM32FX_SPI_CR1, STM32FX_SPI_CR1_SSI |
						 STM32FX_SPI_CR1_BIDIOE |
						 STM32FX_SPI_CR1_MSTR |
						 STM32FX_SPI_CR1_SSM);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}
2112 
/**
 * stm32h7_spi_config - Configure SPI controller
 * @spi: pointer to the spi controller data structure
 *
 * One-time controller setup for the H7/MP25 variants, done under spi->lock.
 * Device (target) mode keeps hardware SS management; host mode uses
 * software SS. Always returns 0.
 */
static int stm32h7_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cr1 = 0, cfg2 = 0;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR,
			   STM32H7_SPI_I2SCFGR_I2SMOD);

	if (STM32_SPI_DEVICE_MODE(spi)) {
		/* Use native device select */
		cfg2 &= ~STM32H7_SPI_CFG2_SSM;
	} else {
		/*
		 * - Transmitter half duplex direction
		 * - Automatic communication suspend when RX-Fifo is full
		 * - SS input value high
		 */
		cr1 |= STM32H7_SPI_CR1_HDDIR | STM32H7_SPI_CR1_MASRX | STM32H7_SPI_CR1_SSI;

		/*
		 * - Set the host mode (default Motorola mode)
		 * - Consider 1 host/n devices configuration and
		 *   SS input value is determined by the SSI bit
		 * - keep control of all associated GPIOs
		 */
		cfg2 |= STM32H7_SPI_CFG2_MASTER | STM32H7_SPI_CFG2_SSM | STM32H7_SPI_CFG2_AFCNTR;
	}

	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, cr1);
	stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, cfg2);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}
2155 
/* STM32F4 variant: shared F4/F7 register map, no FIFO, no device mode */
static const struct stm32_spi_cfg stm32f4_spi_cfg = {
	.regs = &stm32fx_spi_regspec,
	.get_bpw_mask = stm32f4_spi_get_bpw_mask,
	.disable = stm32fx_spi_disable,
	.config = stm32fx_spi_config,
	.set_bpw = stm32f4_spi_set_bpw,
	.set_mode = stm32fx_spi_set_mode,
	.write_tx = stm32f4_spi_write_tx,
	.read_rx = stm32f4_spi_read_rx,
	.transfer_one_dma_start = stm32fx_spi_transfer_one_dma_start,
	.dma_tx_cb = stm32fx_spi_dma_tx_cb,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	.transfer_one_irq = stm32fx_spi_transfer_one_irq,
	.irq_handler_event = stm32fx_spi_irq_event,
	.irq_handler_thread = stm32fx_spi_irq_thread,
	.baud_rate_div_min = STM32FX_SPI_BR_DIV_MIN,
	.baud_rate_div_max = STM32FX_SPI_BR_DIV_MAX,
	.has_fifo = false,
	.has_device_mode = false,
	.flags = SPI_CONTROLLER_MUST_TX,
};
2177 
2178 static const struct stm32_spi_cfg stm32f7_spi_cfg = {
2179 	.regs = &stm32fx_spi_regspec,
2180 	.get_bpw_mask = stm32f7_spi_get_bpw_mask,
2181 	.disable = stm32fx_spi_disable,
2182 	.config = stm32fx_spi_config,
2183 	.set_bpw = stm32f7_spi_set_bpw,
2184 	.set_mode = stm32fx_spi_set_mode,
2185 	.write_tx = stm32f7_spi_write_tx,
2186 	.read_rx = stm32f7_spi_read_rx,
2187 	.transfer_one_dma_start = stm32f7_spi_transfer_one_dma_start,
2188 	.dma_tx_cb = stm32fx_spi_dma_tx_cb,
2189 	.dma_rx_cb = stm32_spi_dma_rx_cb,
2190 	.transfer_one_irq = stm32fx_spi_transfer_one_irq,
2191 	.irq_handler_event = stm32fx_spi_irq_event,
2192 	.irq_handler_thread = stm32fx_spi_irq_thread,
2193 	.baud_rate_div_min = STM32FX_SPI_BR_DIV_MIN,
2194 	.baud_rate_div_max = STM32FX_SPI_BR_DIV_MAX,
2195 	.has_fifo = false,
2196 	.flags = SPI_CONTROLLER_MUST_TX,
2197 };
2198 
/* STM32H7 variant: FIFO-based, supports device (target) mode */
static const struct stm32_spi_cfg stm32h7_spi_cfg = {
	.regs = &stm32h7_spi_regspec,
	.get_fifo_size = stm32h7_spi_get_fifo_size,
	.get_bpw_mask = stm32h7_spi_get_bpw_mask,
	.disable = stm32h7_spi_disable,
	.config = stm32h7_spi_config,
	.set_bpw = stm32h7_spi_set_bpw,
	.set_mode = stm32h7_spi_set_mode,
	.set_data_idleness = stm32h7_spi_data_idleness,
	.set_number_of_data = stm32h7_spi_number_of_data,
	.write_tx = stm32h7_spi_write_txfifo,
	.read_rx = stm32h7_spi_read_rxfifo,
	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	/*
	 * dma_tx_cb is not necessary since in case of TX, dma is followed by
	 * SPI access hence handling is performed within the SPI interrupt
	 */
	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
	.irq_handler_thread = stm32h7_spi_irq_thread,
	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
	.has_fifo = true,
	.has_device_mode = true,
};
2224 
/*
 * STM32MP2 is compatible with the STM32H7 except:
 * - enforce the DMA maxburst value to 1
 * - spi8 have limited feature set (TSIZE_MAX = 1024, BPW of 8 OR 16)
 */
static const struct stm32_spi_cfg stm32mp25_spi_cfg = {
	.regs = &stm32mp25_spi_regspec,
	.get_fifo_size = stm32h7_spi_get_fifo_size,
	.get_bpw_mask = stm32mp25_spi_get_bpw_mask,
	.disable = stm32h7_spi_disable,
	.config = stm32h7_spi_config,
	.set_bpw = stm32h7_spi_set_bpw,
	.set_mode = stm32h7_spi_set_mode,
	.set_data_idleness = stm32h7_spi_data_idleness,
	.set_number_of_data = stm32h7_spi_number_of_data,
	/*
	 * NOTE(review): unlike stm32h7_spi_cfg, no .write_tx/.read_rx here —
	 * confirm the H7 IRQ path does not dereference these on MP25.
	 */
	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
	.dma_rx_cb = stm32_spi_dma_rx_cb,
	/*
	 * dma_tx_cb is not necessary since in case of TX, dma is followed by
	 * SPI access hence handling is performed within the SPI interrupt
	 */
	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
	.irq_handler_thread = stm32h7_spi_irq_thread,
	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
	.has_fifo = true,
	.prevent_dma_burst = true,
	.has_device_mode = true,
};
2254 
/* Map each supported compatible string to its per-variant configuration */
static const struct of_device_id stm32_spi_of_match[] = {
	{ .compatible = "st,stm32mp25-spi", .data = (void *)&stm32mp25_spi_cfg },
	{ .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg },
	{ .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg },
	{ .compatible = "st,stm32f7-spi", .data = (void *)&stm32f7_spi_cfg },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_spi_of_match);
2263 
/**
 * stm32h7_spi_device_abort - abort an on-going transfer in device mode
 * @ctrl: controller interface
 *
 * Only finalizes the current transfer so the core stops waiting on it;
 * no hardware state is touched here. Always returns 0.
 */
static int stm32h7_spi_device_abort(struct spi_controller *ctrl)
{
	spi_finalize_current_transfer(ctrl);
	return 0;
}
2269 
stm32_spi_probe(struct platform_device * pdev)2270 static int stm32_spi_probe(struct platform_device *pdev)
2271 {
2272 	struct spi_controller *ctrl;
2273 	struct stm32_spi *spi;
2274 	struct resource *res;
2275 	struct reset_control *rst;
2276 	struct device_node *np = pdev->dev.of_node;
2277 	const struct stm32_spi_cfg *cfg;
2278 	bool device_mode;
2279 	int ret;
2280 
2281 	cfg = of_device_get_match_data(&pdev->dev);
2282 	if (!cfg) {
2283 		dev_err(&pdev->dev, "Failed to get match data for platform\n");
2284 		return -ENODEV;
2285 	}
2286 
2287 	device_mode = of_property_read_bool(np, "spi-slave");
2288 	if (!cfg->has_device_mode && device_mode) {
2289 		dev_err(&pdev->dev, "spi-slave not supported\n");
2290 		return -EPERM;
2291 	}
2292 
2293 	if (device_mode)
2294 		ctrl = devm_spi_alloc_target(&pdev->dev, sizeof(struct stm32_spi));
2295 	else
2296 		ctrl = devm_spi_alloc_host(&pdev->dev, sizeof(struct stm32_spi));
2297 	if (!ctrl) {
2298 		dev_err(&pdev->dev, "spi controller allocation failed\n");
2299 		return -ENOMEM;
2300 	}
2301 	platform_set_drvdata(pdev, ctrl);
2302 
2303 	spi = spi_controller_get_devdata(ctrl);
2304 	spi->dev = &pdev->dev;
2305 	spi->ctrl = ctrl;
2306 	spi->device_mode = device_mode;
2307 	spin_lock_init(&spi->lock);
2308 
2309 	spi->cfg = cfg;
2310 
2311 	spi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
2312 	if (IS_ERR(spi->base))
2313 		return PTR_ERR(spi->base);
2314 
2315 	spi->phys_addr = (dma_addr_t)res->start;
2316 
2317 	spi->irq = platform_get_irq(pdev, 0);
2318 	if (spi->irq <= 0)
2319 		return spi->irq;
2320 
2321 	ret = devm_request_threaded_irq(&pdev->dev, spi->irq,
2322 					spi->cfg->irq_handler_event,
2323 					spi->cfg->irq_handler_thread,
2324 					IRQF_ONESHOT, pdev->name, ctrl);
2325 	if (ret) {
2326 		dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq,
2327 			ret);
2328 		return ret;
2329 	}
2330 
2331 	spi->clk = devm_clk_get(&pdev->dev, NULL);
2332 	if (IS_ERR(spi->clk)) {
2333 		ret = PTR_ERR(spi->clk);
2334 		dev_err(&pdev->dev, "clk get failed: %d\n", ret);
2335 		return ret;
2336 	}
2337 
2338 	ret = clk_prepare_enable(spi->clk);
2339 	if (ret) {
2340 		dev_err(&pdev->dev, "clk enable failed: %d\n", ret);
2341 		return ret;
2342 	}
2343 	spi->clk_rate = clk_get_rate(spi->clk);
2344 	if (!spi->clk_rate) {
2345 		dev_err(&pdev->dev, "clk rate = 0\n");
2346 		ret = -EINVAL;
2347 		goto err_clk_disable;
2348 	}
2349 
2350 	rst = devm_reset_control_get_optional_exclusive(&pdev->dev, NULL);
2351 	if (rst) {
2352 		if (IS_ERR(rst)) {
2353 			ret = dev_err_probe(&pdev->dev, PTR_ERR(rst),
2354 					    "failed to get reset\n");
2355 			goto err_clk_disable;
2356 		}
2357 
2358 		reset_control_assert(rst);
2359 		udelay(2);
2360 		reset_control_deassert(rst);
2361 	}
2362 
2363 	if (spi->cfg->has_fifo)
2364 		spi->fifo_size = spi->cfg->get_fifo_size(spi);
2365 
2366 	spi->feature_set = STM32_SPI_FEATURE_FULL;
2367 	if (spi->cfg->regs->fullcfg.reg) {
2368 		spi->feature_set =
2369 			FIELD_GET(STM32MP25_SPI_HWCFGR1_FULLCFG,
2370 				  readl_relaxed(spi->base + spi->cfg->regs->fullcfg.reg));
2371 
2372 		dev_dbg(spi->dev, "%s feature set\n",
2373 			spi->feature_set == STM32_SPI_FEATURE_FULL ? "full" : "limited");
2374 	}
2375 
2376 	/* Only for STM32H7 and after */
2377 	spi->t_size_max = spi->feature_set == STM32_SPI_FEATURE_FULL ?
2378 				STM32H7_SPI_TSIZE_MAX :
2379 				STM32MP25_SPI_TSIZE_MAX_LIMITED;
2380 	dev_dbg(spi->dev, "one message max size %d\n", spi->t_size_max);
2381 
2382 	ret = spi->cfg->config(spi);
2383 	if (ret) {
2384 		dev_err(&pdev->dev, "controller configuration failed: %d\n",
2385 			ret);
2386 		goto err_clk_disable;
2387 	}
2388 
2389 	ctrl->dev.of_node = pdev->dev.of_node;
2390 	ctrl->auto_runtime_pm = true;
2391 	ctrl->bus_num = pdev->id;
2392 	ctrl->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST |
2393 			  SPI_3WIRE | SPI_READY;
2394 	ctrl->bits_per_word_mask = spi->cfg->get_bpw_mask(spi);
2395 	ctrl->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min;
2396 	ctrl->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max;
2397 	ctrl->use_gpio_descriptors = true;
2398 	ctrl->optimize_message = stm32_spi_optimize_message;
2399 	ctrl->prepare_message = stm32_spi_prepare_msg;
2400 	ctrl->transfer_one = stm32_spi_transfer_one;
2401 	ctrl->unprepare_message = stm32_spi_unprepare_msg;
2402 	ctrl->flags = spi->cfg->flags;
2403 	if (STM32_SPI_DEVICE_MODE(spi))
2404 		ctrl->target_abort = stm32h7_spi_device_abort;
2405 
2406 	spi->dma_tx = dma_request_chan(spi->dev, "tx");
2407 	if (IS_ERR(spi->dma_tx)) {
2408 		ret = PTR_ERR(spi->dma_tx);
2409 		spi->dma_tx = NULL;
2410 		if (ret == -EPROBE_DEFER)
2411 			goto err_clk_disable;
2412 
2413 		dev_warn(&pdev->dev, "failed to request tx dma channel\n");
2414 	} else {
2415 		ctrl->dma_tx = spi->dma_tx;
2416 	}
2417 
2418 	spi->dma_rx = dma_request_chan(spi->dev, "rx");
2419 	if (IS_ERR(spi->dma_rx)) {
2420 		ret = PTR_ERR(spi->dma_rx);
2421 		spi->dma_rx = NULL;
2422 		if (ret == -EPROBE_DEFER)
2423 			goto err_dma_release;
2424 
2425 		dev_warn(&pdev->dev, "failed to request rx dma channel\n");
2426 	} else {
2427 		ctrl->dma_rx = spi->dma_rx;
2428 	}
2429 
2430 	if (spi->dma_tx || spi->dma_rx)
2431 		ctrl->can_dma = stm32_spi_can_dma;
2432 
2433 	spi->sram_pool = of_gen_pool_get(pdev->dev.of_node, "sram", 0);
2434 	if (spi->sram_pool) {
2435 		spi->sram_rx_buf_size = gen_pool_size(spi->sram_pool);
2436 		dev_info(&pdev->dev, "SRAM pool: %zu KiB for RX DMA/MDMA chaining\n",
2437 			 spi->sram_rx_buf_size / 1024);
2438 		spi->sram_rx_buf = gen_pool_dma_zalloc(spi->sram_pool, spi->sram_rx_buf_size,
2439 						       &spi->sram_dma_rx_buf);
2440 		if (!spi->sram_rx_buf) {
2441 			dev_err(&pdev->dev, "failed to allocate SRAM buffer\n");
2442 		} else {
2443 			spi->mdma_rx = dma_request_chan(spi->dev, "rxm2m");
2444 			if (IS_ERR(spi->mdma_rx)) {
2445 				ret = PTR_ERR(spi->mdma_rx);
2446 				spi->mdma_rx = NULL;
2447 				if (ret == -EPROBE_DEFER) {
2448 					goto err_pool_free;
2449 				} else {
2450 					gen_pool_free(spi->sram_pool,
2451 						      (unsigned long)spi->sram_rx_buf,
2452 						      spi->sram_rx_buf_size);
2453 					dev_warn(&pdev->dev,
2454 						 "failed to request rx mdma channel, DMA only\n");
2455 				}
2456 			}
2457 		}
2458 	}
2459 
2460 	pm_runtime_set_autosuspend_delay(&pdev->dev,
2461 					 STM32_SPI_AUTOSUSPEND_DELAY);
2462 	pm_runtime_use_autosuspend(&pdev->dev);
2463 	pm_runtime_set_active(&pdev->dev);
2464 	pm_runtime_get_noresume(&pdev->dev);
2465 	pm_runtime_enable(&pdev->dev);
2466 
2467 	ret = spi_register_controller(ctrl);
2468 	if (ret) {
2469 		dev_err(&pdev->dev, "spi controller registration failed: %d\n",
2470 			ret);
2471 		goto err_pm_disable;
2472 	}
2473 
2474 	pm_runtime_put_autosuspend(&pdev->dev);
2475 
2476 	dev_info(&pdev->dev, "driver initialized (%s mode)\n",
2477 		 STM32_SPI_HOST_MODE(spi) ? "host" : "device");
2478 
2479 	return 0;
2480 
2481 err_pm_disable:
2482 	pm_runtime_disable(&pdev->dev);
2483 	pm_runtime_put_noidle(&pdev->dev);
2484 	pm_runtime_set_suspended(&pdev->dev);
2485 	pm_runtime_dont_use_autosuspend(&pdev->dev);
2486 
2487 	if (spi->mdma_rx)
2488 		dma_release_channel(spi->mdma_rx);
2489 err_pool_free:
2490 	if (spi->sram_pool)
2491 		gen_pool_free(spi->sram_pool, (unsigned long)spi->sram_rx_buf,
2492 			      spi->sram_rx_buf_size);
2493 err_dma_release:
2494 	if (spi->dma_tx)
2495 		dma_release_channel(spi->dma_tx);
2496 	if (spi->dma_rx)
2497 		dma_release_channel(spi->dma_rx);
2498 err_clk_disable:
2499 	clk_disable_unprepare(spi->clk);
2500 
2501 	return ret;
2502 }
2503 
/* Unbind: unregister the controller, then tear resources down in reverse
 * order of probe. The device is resumed first so register accesses in
 * disable() are safe.
 */
static void stm32_spi_remove(struct platform_device *pdev)
{
	struct spi_controller *ctrl = platform_get_drvdata(pdev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	pm_runtime_get_sync(&pdev->dev);

	spi_unregister_controller(ctrl);
	spi->cfg->disable(spi);

	pm_runtime_disable(&pdev->dev);
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_set_suspended(&pdev->dev);
	pm_runtime_dont_use_autosuspend(&pdev->dev);

	if (ctrl->dma_tx)
		dma_release_channel(ctrl->dma_tx);
	if (ctrl->dma_rx)
		dma_release_channel(ctrl->dma_rx);
	if (spi->mdma_rx)
		dma_release_channel(spi->mdma_rx);
	if (spi->sram_rx_buf)
		gen_pool_free(spi->sram_pool, (unsigned long)spi->sram_rx_buf,
			      spi->sram_rx_buf_size);

	clk_disable_unprepare(spi->clk);


	pinctrl_pm_select_sleep_state(&pdev->dev);
}
2534 
/* Runtime suspend: gate the kernel clock and park the pins in sleep state */
static int __maybe_unused stm32_spi_runtime_suspend(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);

	clk_disable_unprepare(spi->clk);

	return pinctrl_pm_select_sleep_state(dev);
}
2544 
/* Runtime resume: restore the default pin state, then ungate the clock */
static int __maybe_unused stm32_spi_runtime_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	int ret;

	ret = pinctrl_pm_select_default_state(dev);
	if (ret)
		return ret;

	return clk_prepare_enable(spi->clk);
}
2557 
/* System suspend: quiesce the SPI core queue, then force runtime suspend */
static int __maybe_unused stm32_spi_suspend(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	int ret;

	ret = spi_controller_suspend(ctrl);
	if (ret)
		return ret;

	return pm_runtime_force_suspend(dev);
}
2569 
/*
 * System resume: undo forced runtime suspend, restart the SPI core queue,
 * then re-apply the one-time controller configuration (registers may have
 * been lost across the low-power state).
 */
static int __maybe_unused stm32_spi_resume(struct device *dev)
{
	struct spi_controller *ctrl = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_controller_get_devdata(ctrl);
	int ret;

	ret = pm_runtime_force_resume(dev);
	if (ret)
		return ret;

	ret = spi_controller_resume(ctrl);
	if (ret) {
		clk_disable_unprepare(spi->clk);
		return ret;
	}

	/* Keep the device powered while reconfiguring it */
	ret = pm_runtime_resume_and_get(dev);
	if (ret < 0) {
		dev_err(dev, "Unable to power device:%d\n", ret);
		return ret;
	}

	spi->cfg->config(spi);

	pm_runtime_put_autosuspend(dev);

	return 0;
}
2598 
/* System sleep and runtime PM callbacks */
static const struct dev_pm_ops stm32_spi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume)
	SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend,
			   stm32_spi_runtime_resume, NULL)
};
2604 
/* Platform driver glue and module metadata */
static struct platform_driver stm32_spi_driver = {
	.probe = stm32_spi_probe,
	.remove = stm32_spi_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_spi_pm_ops,
		.of_match_table = stm32_spi_of_match,
	},
};

module_platform_driver(stm32_spi_driver);

MODULE_ALIAS("platform:" DRIVER_NAME);
MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>");
MODULE_LICENSE("GPL v2");
2621