1 // SPDX-License-Identifier: GPL-2.0 2 // 3 // STMicroelectronics STM32 SPI Controller driver 4 // 5 // Copyright (C) 2017, STMicroelectronics - All Rights Reserved 6 // Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics. 7 8 #include <linux/bitfield.h> 9 #include <linux/debugfs.h> 10 #include <linux/clk.h> 11 #include <linux/delay.h> 12 #include <linux/dmaengine.h> 13 #include <linux/interrupt.h> 14 #include <linux/iopoll.h> 15 #include <linux/module.h> 16 #include <linux/of.h> 17 #include <linux/platform_device.h> 18 #include <linux/pinctrl/consumer.h> 19 #include <linux/pm_runtime.h> 20 #include <linux/reset.h> 21 #include <linux/spi/spi.h> 22 23 #define DRIVER_NAME "spi_stm32" 24 25 /* STM32F4/7 SPI registers */ 26 #define STM32FX_SPI_CR1 0x00 27 #define STM32FX_SPI_CR2 0x04 28 #define STM32FX_SPI_SR 0x08 29 #define STM32FX_SPI_DR 0x0C 30 #define STM32FX_SPI_I2SCFGR 0x1C 31 32 /* STM32FX_SPI_CR1 bit fields */ 33 #define STM32FX_SPI_CR1_CPHA BIT(0) 34 #define STM32FX_SPI_CR1_CPOL BIT(1) 35 #define STM32FX_SPI_CR1_MSTR BIT(2) 36 #define STM32FX_SPI_CR1_BR_SHIFT 3 37 #define STM32FX_SPI_CR1_BR GENMASK(5, 3) 38 #define STM32FX_SPI_CR1_SPE BIT(6) 39 #define STM32FX_SPI_CR1_LSBFRST BIT(7) 40 #define STM32FX_SPI_CR1_SSI BIT(8) 41 #define STM32FX_SPI_CR1_SSM BIT(9) 42 #define STM32FX_SPI_CR1_RXONLY BIT(10) 43 #define STM32F4_SPI_CR1_DFF BIT(11) 44 #define STM32F7_SPI_CR1_CRCL BIT(11) 45 #define STM32FX_SPI_CR1_CRCNEXT BIT(12) 46 #define STM32FX_SPI_CR1_CRCEN BIT(13) 47 #define STM32FX_SPI_CR1_BIDIOE BIT(14) 48 #define STM32FX_SPI_CR1_BIDIMODE BIT(15) 49 #define STM32FX_SPI_CR1_BR_MIN 0 50 #define STM32FX_SPI_CR1_BR_MAX (GENMASK(5, 3) >> 3) 51 52 /* STM32FX_SPI_CR2 bit fields */ 53 #define STM32FX_SPI_CR2_RXDMAEN BIT(0) 54 #define STM32FX_SPI_CR2_TXDMAEN BIT(1) 55 #define STM32FX_SPI_CR2_SSOE BIT(2) 56 #define STM32FX_SPI_CR2_FRF BIT(4) 57 #define STM32FX_SPI_CR2_ERRIE BIT(5) 58 #define STM32FX_SPI_CR2_RXNEIE BIT(6) 59 #define STM32FX_SPI_CR2_TXEIE BIT(7) 60 #define STM32F7_SPI_CR2_DS GENMASK(11, 8) 61 #define STM32F7_SPI_CR2_FRXTH BIT(12) 62 #define STM32F7_SPI_CR2_LDMA_RX BIT(13) 63 #define STM32F7_SPI_CR2_LDMA_TX BIT(14) 64 65 /* STM32FX_SPI_SR bit fields */ 66 #define STM32FX_SPI_SR_RXNE BIT(0) 67 #define STM32FX_SPI_SR_TXE BIT(1) 68 #define STM32FX_SPI_SR_CHSIDE BIT(2) 69 #define STM32FX_SPI_SR_UDR BIT(3) 70 #define STM32FX_SPI_SR_CRCERR BIT(4) 71 #define STM32FX_SPI_SR_MODF BIT(5) 72 #define STM32FX_SPI_SR_OVR BIT(6) 73 #define STM32FX_SPI_SR_BSY BIT(7) 74 #define STM32FX_SPI_SR_FRE BIT(8) 75 #define STM32F7_SPI_SR_FRLVL GENMASK(10, 9) 76 #define STM32F7_SPI_SR_FTLVL GENMASK(12, 11) 77 78 /* STM32FX_SPI_I2SCFGR bit fields */ 79 #define STM32FX_SPI_I2SCFGR_I2SMOD BIT(11) 80 81 /* STM32F4 SPI Baud Rate min/max divisor */ 82 #define STM32FX_SPI_BR_DIV_MIN (2 << STM32FX_SPI_CR1_BR_MIN) 83 #define STM32FX_SPI_BR_DIV_MAX (2 << STM32FX_SPI_CR1_BR_MAX) 84 85 /* STM32H7 SPI registers */ 86 #define STM32H7_SPI_CR1 0x00 87 #define STM32H7_SPI_CR2 0x04 88 #define STM32H7_SPI_CFG1 0x08 89 #define STM32H7_SPI_CFG2 0x0C 90 #define STM32H7_SPI_IER 0x10 91 #define STM32H7_SPI_SR 0x14 92 #define STM32H7_SPI_IFCR 0x18 93 #define STM32H7_SPI_TXDR 0x20 94 #define STM32H7_SPI_RXDR 0x30 95 #define STM32H7_SPI_I2SCFGR 0x50 96 97 /* STM32H7_SPI_CR1 bit fields */ 98 #define STM32H7_SPI_CR1_SPE BIT(0) 99 #define STM32H7_SPI_CR1_MASRX BIT(8) 100 #define STM32H7_SPI_CR1_CSTART BIT(9) 101 #define STM32H7_SPI_CR1_CSUSP BIT(10) 102 #define STM32H7_SPI_CR1_HDDIR BIT(11) 103 #define STM32H7_SPI_CR1_SSI BIT(12) 104 105 /* STM32H7_SPI_CR2 bit fields */ 106 #define STM32H7_SPI_CR2_TSIZE GENMASK(15, 0) 107 #define STM32H7_SPI_TSIZE_MAX GENMASK(15, 0) 108 109 /* STM32H7_SPI_CFG1 bit fields */ 110 #define STM32H7_SPI_CFG1_DSIZE GENMASK(4, 0) 111 #define STM32H7_SPI_CFG1_FTHLV GENMASK(8, 5) 112 #define STM32H7_SPI_CFG1_RXDMAEN BIT(14) 113 #define STM32H7_SPI_CFG1_TXDMAEN BIT(15) 114 #define STM32H7_SPI_CFG1_MBR GENMASK(30, 28) 115 #define STM32H7_SPI_CFG1_MBR_SHIFT 28 116 #define STM32H7_SPI_CFG1_MBR_MIN 0 117 #define STM32H7_SPI_CFG1_MBR_MAX (GENMASK(30, 28) >> 28) 118 119 /* STM32H7_SPI_CFG2 bit fields */ 120 #define STM32H7_SPI_CFG2_MIDI GENMASK(7, 4) 121 #define STM32H7_SPI_CFG2_COMM GENMASK(18, 17) 122 #define STM32H7_SPI_CFG2_SP GENMASK(21, 19) 123 #define STM32H7_SPI_CFG2_MASTER BIT(22) 124 #define STM32H7_SPI_CFG2_LSBFRST BIT(23) 125 #define STM32H7_SPI_CFG2_CPHA BIT(24) 126 #define STM32H7_SPI_CFG2_CPOL BIT(25) 127 #define STM32H7_SPI_CFG2_SSM BIT(26) 128 #define STM32H7_SPI_CFG2_SSIOP BIT(28) 129 #define STM32H7_SPI_CFG2_AFCNTR BIT(31) 130 131 /* STM32H7_SPI_IER bit fields */ 132 #define STM32H7_SPI_IER_RXPIE BIT(0) 133 #define STM32H7_SPI_IER_TXPIE BIT(1) 134 #define STM32H7_SPI_IER_DXPIE BIT(2) 135 #define STM32H7_SPI_IER_EOTIE BIT(3) 136 #define STM32H7_SPI_IER_TXTFIE BIT(4) 137 #define STM32H7_SPI_IER_OVRIE BIT(6) 138 #define STM32H7_SPI_IER_MODFIE BIT(9) 139 #define STM32H7_SPI_IER_ALL GENMASK(10, 0) 140 141 /* STM32H7_SPI_SR bit fields */ 142 #define STM32H7_SPI_SR_RXP BIT(0) 143 #define STM32H7_SPI_SR_TXP BIT(1) 144 #define STM32H7_SPI_SR_EOT BIT(3) 145 #define STM32H7_SPI_SR_OVR BIT(6) 146 #define STM32H7_SPI_SR_MODF BIT(9) 147 #define STM32H7_SPI_SR_SUSP BIT(11) 148 #define STM32H7_SPI_SR_RXPLVL GENMASK(14, 13) 149 #define STM32H7_SPI_SR_RXWNE BIT(15) 150 151 /* STM32H7_SPI_IFCR bit fields */ 152 #define STM32H7_SPI_IFCR_ALL GENMASK(11, 3) 153 154 /* STM32H7_SPI_I2SCFGR bit fields */ 155 #define STM32H7_SPI_I2SCFGR_I2SMOD BIT(0) 156 157 /* STM32MP25 SPI registers bit fields */ 158 #define STM32MP25_SPI_HWCFGR1 0x3F0 159 160 /* STM32MP25_SPI_CR2 bit fields */ 161 #define STM32MP25_SPI_TSIZE_MAX_LIMITED GENMASK(9, 0) 162 163 /* STM32MP25_SPI_HWCFGR1 */ 164 #define STM32MP25_SPI_HWCFGR1_FULLCFG GENMASK(27, 24) 165 #define STM32MP25_SPI_HWCFGR1_FULLCFG_LIMITED 0x0 166 #define STM32MP25_SPI_HWCFGR1_FULLCFG_FULL 0x1 167 #define STM32MP25_SPI_HWCFGR1_DSCFG GENMASK(19, 16) 168 #define STM32MP25_SPI_HWCFGR1_DSCFG_16_B 0x0 169 #define STM32MP25_SPI_HWCFGR1_DSCFG_32_B 0x1 170 171 /* STM32H7 SPI Master Baud Rate min/max divisor */ 172 #define STM32H7_SPI_MBR_DIV_MIN (2 << STM32H7_SPI_CFG1_MBR_MIN) 173 #define STM32H7_SPI_MBR_DIV_MAX (2 << STM32H7_SPI_CFG1_MBR_MAX) 174 175 /* STM32H7 SPI Communication mode */ 176 #define STM32H7_SPI_FULL_DUPLEX 0 177 #define STM32H7_SPI_SIMPLEX_TX 1 178 #define STM32H7_SPI_SIMPLEX_RX 2 179 #define STM32H7_SPI_HALF_DUPLEX 3 180 181 /* SPI Communication type */ 182 #define SPI_FULL_DUPLEX 0 183 #define SPI_SIMPLEX_TX 1 184 #define SPI_SIMPLEX_RX 2 185 #define SPI_3WIRE_TX 3 186 #define SPI_3WIRE_RX 4 187 188 #define STM32_SPI_AUTOSUSPEND_DELAY 1 /* 1 ms */ 189 190 /* 191 * use PIO for small transfers, avoiding DMA setup/teardown overhead for drivers 192 * without fifo buffers. 193 */ 194 #define SPI_DMA_MIN_BYTES 16 195 196 /* STM32 SPI driver helpers */ 197 #define STM32_SPI_HOST_MODE(stm32_spi) (!(stm32_spi)->device_mode) 198 #define STM32_SPI_DEVICE_MODE(stm32_spi) ((stm32_spi)->device_mode) 199 200 /** 201 * struct stm32_spi_reg - stm32 SPI register & bitfield desc 202 * @reg: register offset 203 * @mask: bitfield mask 204 * @shift: left shift 205 */ 206 struct stm32_spi_reg { 207 int reg; 208 int mask; 209 int shift; 210 }; 211 212 /** 213 * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data 214 * @en: enable register and SPI enable bit 215 * @dma_rx_en: SPI DMA RX enable register end SPI DMA RX enable bit 216 * @dma_tx_en: SPI DMA TX enable register end SPI DMA TX enable bit 217 * @cpol: clock polarity register and polarity bit 218 * @cpha: clock phase register and phase bit 219 * @lsb_first: LSB transmitted first register and bit 220 * @cs_high: chips select active value 221 * @br: baud rate register and bitfields 222 * @rx: SPI RX data register 223 * @tx: SPI TX data register 224 * @fullcfg: SPI full or limited feature set register 225 */ 226 struct stm32_spi_regspec { 227 const struct stm32_spi_reg en; 228 const struct stm32_spi_reg dma_rx_en; 229 const struct stm32_spi_reg dma_tx_en; 230 const struct stm32_spi_reg cpol; 231 const struct stm32_spi_reg cpha; 232 const struct stm32_spi_reg lsb_first; 233 const struct stm32_spi_reg cs_high; 234 const struct stm32_spi_reg br; 235 const struct stm32_spi_reg rx; 236 const struct stm32_spi_reg tx; 237 const struct stm32_spi_reg fullcfg; 238 }; 239 240 struct stm32_spi; 241 242 /** 243 * struct stm32_spi_cfg - stm32 compatible configuration data 244 * @regs: registers descriptions 245 * @get_fifo_size: routine to get fifo size 246 * @get_bpw_mask: routine to get bits per word mask 247 * @disable: routine to disable controller 248 * @config: routine to configure controller as SPI Host 249 * @set_bpw: routine to configure registers to for bits per word 250 * @set_mode: routine to configure registers to desired mode 251 * @set_data_idleness: optional routine to configure registers to desired idle 252 * time between frames (if driver has this functionality) 253 * @set_number_of_data: optional routine to configure registers to desired 254 * number of data (if driver has this functionality) 255 * @write_tx: routine to write to transmit register/FIFO 256 * @read_rx: routine to read from receive register/FIFO 257 * @transfer_one_dma_start: routine to start transfer a single spi_transfer 258 * using DMA 259 * @dma_rx_cb: routine to call after DMA RX channel operation is complete 260 * @dma_tx_cb: routine to call after DMA TX channel operation is complete 261 * @transfer_one_irq: routine to configure interrupts for driver 262 * @irq_handler_event: Interrupt handler for SPI controller events 263 * @irq_handler_thread: thread of interrupt handler for SPI controller 264 * @baud_rate_div_min: minimum baud rate divisor 265 * @baud_rate_div_max: maximum baud rate divisor 266 * @has_fifo: boolean to know if fifo is used for driver 267 * @has_device_mode: is this compatible capable to switch on device mode 268 * @flags: compatible specific SPI controller flags used at registration time 269 * @prevent_dma_burst: boolean to indicate to prevent DMA burst 270 */ 271 struct stm32_spi_cfg { 272 const struct stm32_spi_regspec *regs; 273 int (*get_fifo_size)(struct stm32_spi *spi); 274 int (*get_bpw_mask)(struct stm32_spi *spi); 275 void (*disable)(struct stm32_spi *spi); 276 int (*config)(struct stm32_spi *spi); 277 void (*set_bpw)(struct stm32_spi *spi); 278 int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type); 279 void (*set_data_idleness)(struct stm32_spi *spi, u32 length); 280 int (*set_number_of_data)(struct stm32_spi *spi, u32 length); 281 void (*write_tx)(struct stm32_spi *spi); 282 void (*read_rx)(struct stm32_spi *spi); 283 void (*transfer_one_dma_start)(struct stm32_spi *spi); 284 void (*dma_rx_cb)(void *data); 285 void (*dma_tx_cb)(void *data); 286 int (*transfer_one_irq)(struct stm32_spi *spi); 287 irqreturn_t (*irq_handler_event)(int irq, void *dev_id); 288 irqreturn_t (*irq_handler_thread)(int irq, void *dev_id); 289 unsigned int baud_rate_div_min; 290 unsigned int baud_rate_div_max; 291 bool has_fifo; 292 bool has_device_mode; 293 u16 flags; 294 bool prevent_dma_burst; 295 }; 296 297 /** 298 * struct stm32_spi - private data of the SPI controller 299 * @dev: driver model representation of the controller 300 * @ctrl: controller interface 301 * @cfg: compatible configuration data 302 * @base: virtual memory area 303 * @clk: hw kernel clock feeding the SPI clock generator 304 * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator 305 * @lock: prevent I/O concurrent access 306 * @irq: SPI controller interrupt line 307 * @fifo_size: size of the embedded fifo in bytes 308 * @t_size_max: maximum number of data of one transfer 309 * @feature_set: SPI full or limited feature set 310 * @cur_midi: host inter-data idleness in ns 311 * @cur_speed: speed configured in Hz 312 * @cur_half_period: time of a half bit in us 313 * @cur_bpw: number of bits in a single SPI data frame 314 * @cur_fthlv: fifo threshold level (data frames in a single data packet) 315 * @cur_comm: SPI communication mode 316 * @cur_xferlen: current transfer length in bytes 317 * @cur_usedma: boolean to know if dma is used in current transfer 318 * @tx_buf: data to be written, or NULL 319 * @rx_buf: data to be read, or NULL 320 * @tx_len: number of data to be written in bytes 321 * @rx_len: number of data to be read in bytes 322 * @dma_tx: dma channel for TX transfer 323 * @dma_rx: dma channel for RX transfer 324 * @phys_addr: SPI registers physical base address 325 * @device_mode: the controller is configured as SPI device 326 */ 327 struct stm32_spi { 328 struct device *dev; 329 struct spi_controller *ctrl; 330 const struct stm32_spi_cfg *cfg; 331 void __iomem *base; 332 struct clk *clk; 333 u32 clk_rate; 334 spinlock_t lock; /* prevent I/O concurrent access */ 335 int irq; 336 unsigned int fifo_size; 337 unsigned int t_size_max; 338 unsigned int feature_set; 339 #define STM32_SPI_FEATURE_LIMITED STM32MP25_SPI_HWCFGR1_FULLCFG_LIMITED /* 0x0 */ 340 #define STM32_SPI_FEATURE_FULL STM32MP25_SPI_HWCFGR1_FULLCFG_FULL /* 0x1 */ 341 342 unsigned int cur_midi; 343 unsigned int cur_speed; 344 unsigned int cur_half_period; 345 unsigned int cur_bpw; 346 unsigned int cur_fthlv; 347 unsigned int cur_comm; 348 unsigned int cur_xferlen; 349 bool cur_usedma; 350 351 const void *tx_buf; 352 void *rx_buf; 353 int tx_len; 354 int rx_len; 355 struct dma_chan *dma_tx; 356 struct dma_chan *dma_rx; 357 dma_addr_t phys_addr; 358 359 bool device_mode; 360 }; 361 362 static const struct stm32_spi_regspec stm32fx_spi_regspec = { 363 .en = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_SPE }, 364 365 .dma_rx_en = { STM32FX_SPI_CR2, STM32FX_SPI_CR2_RXDMAEN }, 366 .dma_tx_en = { STM32FX_SPI_CR2, STM32FX_SPI_CR2_TXDMAEN }, 367 368 .cpol = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_CPOL }, 369 .cpha = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_CPHA }, 370 .lsb_first = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_LSBFRST }, 371 .cs_high = {}, 372 .br = { STM32FX_SPI_CR1, STM32FX_SPI_CR1_BR, STM32FX_SPI_CR1_BR_SHIFT }, 373 374 .rx = { STM32FX_SPI_DR }, 375 .tx = { STM32FX_SPI_DR }, 376 }; 377 378 static const struct stm32_spi_regspec stm32h7_spi_regspec = { 379 /* SPI data transfer is enabled but spi_ker_ck is idle. 380 * CFG1 and CFG2 registers are write protected when SPE is enabled. 381 */ 382 .en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE }, 383 384 .dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN }, 385 .dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN }, 386 387 .cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL }, 388 .cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA }, 389 .lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST }, 390 .cs_high = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_SSIOP }, 391 .br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR, 392 STM32H7_SPI_CFG1_MBR_SHIFT }, 393 394 .rx = { STM32H7_SPI_RXDR }, 395 .tx = { STM32H7_SPI_TXDR }, 396 }; 397 398 static const struct stm32_spi_regspec stm32mp25_spi_regspec = { 399 /* SPI data transfer is enabled but spi_ker_ck is idle. 400 * CFG1 and CFG2 registers are write protected when SPE is enabled. 401 */ 402 .en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE }, 403 404 .dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN }, 405 .dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN }, 406 407 .cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL }, 408 .cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA }, 409 .lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST }, 410 .cs_high = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_SSIOP }, 411 .br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR, 412 STM32H7_SPI_CFG1_MBR_SHIFT }, 413 414 .rx = { STM32H7_SPI_RXDR }, 415 .tx = { STM32H7_SPI_TXDR }, 416 417 .fullcfg = { STM32MP25_SPI_HWCFGR1, STM32MP25_SPI_HWCFGR1_FULLCFG }, 418 }; 419 420 static inline void stm32_spi_set_bits(struct stm32_spi *spi, 421 u32 offset, u32 bits) 422 { 423 writel_relaxed(readl_relaxed(spi->base + offset) | bits, 424 spi->base + offset); 425 } 426 427 static inline void stm32_spi_clr_bits(struct stm32_spi *spi, 428 u32 offset, u32 bits) 429 { 430 writel_relaxed(readl_relaxed(spi->base + offset) & ~bits, 431 spi->base + offset); 432 } 433 434 /** 435 * stm32h7_spi_get_fifo_size - Return fifo size 436 * @spi: pointer to the spi controller data structure 437 */ 438 static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi) 439 { 440 unsigned long flags; 441 u32 count = 0; 442 443 spin_lock_irqsave(&spi->lock, flags); 444 445 stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE); 446 447 while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP) 448 writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR); 449 450 stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE); 451 452 spin_unlock_irqrestore(&spi->lock, flags); 453 454 dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count); 455 456 return count; 457 } 458 459 /** 460 * stm32f4_spi_get_bpw_mask - Return bits per word mask 461 * @spi: pointer to the spi controller data structure 462 */ 463 static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi) 464 { 465 dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n"); 466 return SPI_BPW_MASK(8) | SPI_BPW_MASK(16); 467 } 468 469 /** 470 * stm32f7_spi_get_bpw_mask - Return bits per word mask 471 * @spi: pointer to the spi controller data structure 472 */ 473 static int stm32f7_spi_get_bpw_mask(struct stm32_spi *spi) 474 { 475 dev_dbg(spi->dev, "16-bit maximum data frame\n"); 476 return SPI_BPW_RANGE_MASK(4, 16); 477 } 478 479 /** 480 * stm32h7_spi_get_bpw_mask - Return bits per word mask 481 * @spi: pointer to the spi controller data structure 482 */ 483 static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi) 484 { 485 unsigned long flags; 486 u32 cfg1, max_bpw; 487 488 spin_lock_irqsave(&spi->lock, flags); 489 490 /* 491 * The most significant bit at DSIZE bit field is reserved when the 492 * maximum data size of periperal instances is limited to 16-bit 493 */ 494 stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE); 495 496 cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1); 497 max_bpw = FIELD_GET(STM32H7_SPI_CFG1_DSIZE, cfg1) + 1; 498 499 spin_unlock_irqrestore(&spi->lock, flags); 500 501 dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw); 502 503 return SPI_BPW_RANGE_MASK(4, max_bpw); 504 } 505 506 /** 507 * stm32mp25_spi_get_bpw_mask - Return bits per word mask 508 * @spi: pointer to the spi controller data structure 509 */ 510 static int stm32mp25_spi_get_bpw_mask(struct stm32_spi *spi) 511 { 512 u32 dscfg, max_bpw; 513 514 if (spi->feature_set == STM32_SPI_FEATURE_LIMITED) { 515 dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n"); 516 return SPI_BPW_MASK(8) | SPI_BPW_MASK(16); 517 } 518 519 dscfg = FIELD_GET(STM32MP25_SPI_HWCFGR1_DSCFG, 520 readl_relaxed(spi->base + STM32MP25_SPI_HWCFGR1)); 521 max_bpw = 16; 522 if (dscfg == STM32MP25_SPI_HWCFGR1_DSCFG_32_B) 523 max_bpw = 32; 524 dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw); 525 return SPI_BPW_RANGE_MASK(4, max_bpw); 526 } 527 528 /** 529 * stm32_spi_prepare_mbr - Determine baud rate divisor value 530 * @spi: pointer to the spi controller data structure 531 * @speed_hz: requested speed 532 * @min_div: minimum baud rate divisor 533 * @max_div: maximum baud rate divisor 534 * 535 * Return baud rate divisor value in case of success or -EINVAL 536 */ 537 static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz, 538 u32 min_div, u32 max_div) 539 { 540 u32 div, mbrdiv; 541 542 /* Ensure spi->clk_rate is even */ 543 div = DIV_ROUND_CLOSEST(spi->clk_rate & ~0x1, speed_hz); 544 545 /* 546 * SPI framework set xfer->speed_hz to ctrl->max_speed_hz if 547 * xfer->speed_hz is greater than ctrl->max_speed_hz, and it returns 548 * an error when xfer->speed_hz is lower than ctrl->min_speed_hz, so 549 * no need to check it there. 550 * However, we need to ensure the following calculations. 551 */ 552 if ((div < min_div) || (div > max_div)) 553 return -EINVAL; 554 555 /* Determine the first power of 2 greater than or equal to div */ 556 if (div & (div - 1)) 557 mbrdiv = fls(div); 558 else 559 mbrdiv = fls(div) - 1; 560 561 spi->cur_speed = spi->clk_rate / (1 << mbrdiv); 562 563 spi->cur_half_period = DIV_ROUND_CLOSEST(USEC_PER_SEC, 2 * spi->cur_speed); 564 565 return mbrdiv - 1; 566 } 567 568 /** 569 * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level 570 * @spi: pointer to the spi controller data structure 571 * @xfer_len: length of the message to be transferred 572 */ 573 static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len) 574 { 575 u32 packet, bpw; 576 577 /* data packet should not exceed 1/2 of fifo space */ 578 packet = clamp(xfer_len, 1U, spi->fifo_size / 2); 579 580 /* align packet size with data registers access */ 581 bpw = DIV_ROUND_UP(spi->cur_bpw, 8); 582 return DIV_ROUND_UP(packet, bpw); 583 } 584 585 /** 586 * stm32f4_spi_write_tx - Write bytes to Transmit Data Register 587 * @spi: pointer to the spi controller data structure 588 * 589 * Read from tx_buf depends on remaining bytes to avoid to read beyond 590 * tx_buf end. 591 */ 592 static void stm32f4_spi_write_tx(struct stm32_spi *spi) 593 { 594 if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32FX_SPI_SR) & 595 STM32FX_SPI_SR_TXE)) { 596 u32 offs = spi->cur_xferlen - spi->tx_len; 597 598 if (spi->cur_bpw == 16) { 599 const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs); 600 601 writew_relaxed(*tx_buf16, spi->base + STM32FX_SPI_DR); 602 spi->tx_len -= sizeof(u16); 603 } else { 604 const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs); 605 606 writeb_relaxed(*tx_buf8, spi->base + STM32FX_SPI_DR); 607 spi->tx_len -= sizeof(u8); 608 } 609 } 610 611 dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len); 612 } 613 614 /** 615 * stm32f7_spi_write_tx - Write bytes to Transmit Data Register 616 * @spi: pointer to the spi controller data structure 617 * 618 * Read from tx_buf depends on remaining bytes to avoid to read beyond 619 * tx_buf end. 620 */ 621 static void stm32f7_spi_write_tx(struct stm32_spi *spi) 622 { 623 if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32FX_SPI_SR) & 624 STM32FX_SPI_SR_TXE)) { 625 u32 offs = spi->cur_xferlen - spi->tx_len; 626 627 if (spi->tx_len >= sizeof(u16)) { 628 const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs); 629 630 writew_relaxed(*tx_buf16, spi->base + STM32FX_SPI_DR); 631 spi->tx_len -= sizeof(u16); 632 } else { 633 const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs); 634 635 writeb_relaxed(*tx_buf8, spi->base + STM32FX_SPI_DR); 636 spi->tx_len -= sizeof(u8); 637 } 638 } 639 640 dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len); 641 } 642 643 /** 644 * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register 645 * @spi: pointer to the spi controller data structure 646 * 647 * Read from tx_buf depends on remaining bytes to avoid to read beyond 648 * tx_buf end. 649 */ 650 static void stm32h7_spi_write_txfifo(struct stm32_spi *spi) 651 { 652 while ((spi->tx_len > 0) && 653 (readl_relaxed(spi->base + STM32H7_SPI_SR) & 654 STM32H7_SPI_SR_TXP)) { 655 u32 offs = spi->cur_xferlen - spi->tx_len; 656 657 if (spi->tx_len >= sizeof(u32)) { 658 const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs); 659 660 writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR); 661 spi->tx_len -= sizeof(u32); 662 } else if (spi->tx_len >= sizeof(u16)) { 663 const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs); 664 665 writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR); 666 spi->tx_len -= sizeof(u16); 667 } else { 668 const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs); 669 670 writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR); 671 spi->tx_len -= sizeof(u8); 672 } 673 } 674 675 dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len); 676 } 677 678 /** 679 * stm32f4_spi_read_rx - Read bytes from Receive Data Register 680 * @spi: pointer to the spi controller data structure 681 * 682 * Write in rx_buf depends on remaining bytes to avoid to write beyond 683 * rx_buf end. 684 */ 685 static void stm32f4_spi_read_rx(struct stm32_spi *spi) 686 { 687 if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32FX_SPI_SR) & 688 STM32FX_SPI_SR_RXNE)) { 689 u32 offs = spi->cur_xferlen - spi->rx_len; 690 691 if (spi->cur_bpw == 16) { 692 u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs); 693 694 *rx_buf16 = readw_relaxed(spi->base + STM32FX_SPI_DR); 695 spi->rx_len -= sizeof(u16); 696 } else { 697 u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs); 698 699 *rx_buf8 = readb_relaxed(spi->base + STM32FX_SPI_DR); 700 spi->rx_len -= sizeof(u8); 701 } 702 } 703 704 dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len); 705 } 706 707 /** 708 * stm32f7_spi_read_rx - Read bytes from Receive Data Register 709 * @spi: pointer to the spi controller data structure 710 * 711 * Write in rx_buf depends on remaining bytes to avoid to write beyond 712 * rx_buf end. 713 */ 714 static void stm32f7_spi_read_rx(struct stm32_spi *spi) 715 { 716 u32 sr = readl_relaxed(spi->base + STM32FX_SPI_SR); 717 u32 frlvl = FIELD_GET(STM32F7_SPI_SR_FRLVL, sr); 718 719 while ((spi->rx_len > 0) && (frlvl > 0)) { 720 u32 offs = spi->cur_xferlen - spi->rx_len; 721 722 if ((spi->rx_len >= sizeof(u16)) && (frlvl >= 2)) { 723 u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs); 724 725 *rx_buf16 = readw_relaxed(spi->base + STM32FX_SPI_DR); 726 spi->rx_len -= sizeof(u16); 727 } else { 728 u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs); 729 730 *rx_buf8 = readb_relaxed(spi->base + STM32FX_SPI_DR); 731 spi->rx_len -= sizeof(u8); 732 } 733 734 sr = readl_relaxed(spi->base + STM32FX_SPI_SR); 735 frlvl = FIELD_GET(STM32F7_SPI_SR_FRLVL, sr); 736 } 737 738 if (spi->rx_len >= sizeof(u16)) 739 stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH); 740 else 741 stm32_spi_set_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH); 742 743 dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n", 744 __func__, spi->rx_len, sr); 745 } 746 747 /** 748 * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register 749 * @spi: pointer to the spi controller data structure 750 * 751 * Write in rx_buf depends on remaining bytes to avoid to write beyond 752 * rx_buf end. 753 */ 754 static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi) 755 { 756 u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR); 757 u32 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr); 758 759 while ((spi->rx_len > 0) && 760 ((sr & STM32H7_SPI_SR_RXP) || 761 ((sr & STM32H7_SPI_SR_EOT) && 762 ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) { 763 u32 offs = spi->cur_xferlen - spi->rx_len; 764 765 if ((spi->rx_len >= sizeof(u32)) || 766 (sr & STM32H7_SPI_SR_RXWNE)) { 767 u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs); 768 769 *rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR); 770 spi->rx_len -= sizeof(u32); 771 } else if ((spi->rx_len >= sizeof(u16)) || 772 (!(sr & STM32H7_SPI_SR_RXWNE) && 773 (rxplvl >= 2 || spi->cur_bpw > 8))) { 774 u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs); 775 776 *rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR); 777 spi->rx_len -= sizeof(u16); 778 } else { 779 u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs); 780 781 *rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR); 782 spi->rx_len -= sizeof(u8); 783 } 784 785 sr = readl_relaxed(spi->base + STM32H7_SPI_SR); 786 rxplvl = FIELD_GET(STM32H7_SPI_SR_RXPLVL, sr); 787 } 788 789 dev_dbg(spi->dev, "%s: %d bytes left (sr=%08x)\n", 790 __func__, spi->rx_len, sr); 791 } 792 793 /** 794 * stm32_spi_enable - Enable SPI controller 795 * @spi: pointer to the spi controller data structure 796 */ 797 static void stm32_spi_enable(struct stm32_spi *spi) 798 { 799 dev_dbg(spi->dev, "enable controller\n"); 800 801 stm32_spi_set_bits(spi, spi->cfg->regs->en.reg, 802 spi->cfg->regs->en.mask); 803 } 804 805 /** 806 * stm32fx_spi_disable - Disable SPI controller 807 * @spi: pointer to the spi controller data structure 808 */ 809 static void stm32fx_spi_disable(struct stm32_spi *spi) 810 { 811 unsigned long flags; 812 u32 sr; 813 814 dev_dbg(spi->dev, "disable controller\n"); 815 816 spin_lock_irqsave(&spi->lock, flags); 817 818 if (!(readl_relaxed(spi->base + STM32FX_SPI_CR1) & 819 STM32FX_SPI_CR1_SPE)) { 820 spin_unlock_irqrestore(&spi->lock, flags); 821 return; 822 } 823 824 /* Disable interrupts */ 825 stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32FX_SPI_CR2_TXEIE | 826 STM32FX_SPI_CR2_RXNEIE | 827 STM32FX_SPI_CR2_ERRIE); 828 829 /* Wait until BSY = 0 */ 830 if (readl_relaxed_poll_timeout_atomic(spi->base + STM32FX_SPI_SR, 831 sr, !(sr & STM32FX_SPI_SR_BSY), 832 10, 100000) < 0) { 833 dev_warn(spi->dev, "disabling condition timeout\n"); 834 } 835 836 if (spi->cur_usedma && spi->dma_tx) 837 dmaengine_terminate_async(spi->dma_tx); 838 if (spi->cur_usedma && spi->dma_rx) 839 dmaengine_terminate_async(spi->dma_rx); 840 841 stm32_spi_clr_bits(spi, STM32FX_SPI_CR1, STM32FX_SPI_CR1_SPE); 842 843 stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32FX_SPI_CR2_TXDMAEN | 844 STM32FX_SPI_CR2_RXDMAEN); 845 846 /* Sequence to clear OVR flag */ 847 readl_relaxed(spi->base + STM32FX_SPI_DR); 848 readl_relaxed(spi->base + STM32FX_SPI_SR); 849 850 spin_unlock_irqrestore(&spi->lock, flags); 851 } 852 853 /** 854 * stm32h7_spi_disable - Disable SPI controller 855 * @spi: pointer to the spi controller data structure 856 * 857 * RX-Fifo is flushed when SPI controller is disabled. 858 */ 859 static void stm32h7_spi_disable(struct stm32_spi *spi) 860 { 861 unsigned long flags; 862 u32 cr1; 863 864 dev_dbg(spi->dev, "disable controller\n"); 865 866 spin_lock_irqsave(&spi->lock, flags); 867 868 cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1); 869 870 if (!(cr1 & STM32H7_SPI_CR1_SPE)) { 871 spin_unlock_irqrestore(&spi->lock, flags); 872 return; 873 } 874 875 /* Add a delay to make sure that transmission is ended. */ 876 if (spi->cur_half_period) 877 udelay(spi->cur_half_period); 878 879 if (spi->cur_usedma && spi->dma_tx) 880 dmaengine_terminate_async(spi->dma_tx); 881 if (spi->cur_usedma && spi->dma_rx) 882 dmaengine_terminate_async(spi->dma_rx); 883 884 stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE); 885 886 stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN | 887 STM32H7_SPI_CFG1_RXDMAEN); 888 889 /* Disable interrupts and clear status flags */ 890 writel_relaxed(0, spi->base + STM32H7_SPI_IER); 891 writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR); 892 893 spin_unlock_irqrestore(&spi->lock, flags); 894 } 895 896 /** 897 * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use 898 * @ctrl: controller interface 899 * @spi_dev: pointer to the spi device 900 * @transfer: pointer to spi transfer 901 * 902 * If driver has fifo and the current transfer size is greater than fifo size, 903 * use DMA. Otherwise use DMA for transfer longer than defined DMA min bytes. 904 */ 905 static bool stm32_spi_can_dma(struct spi_controller *ctrl, 906 struct spi_device *spi_dev, 907 struct spi_transfer *transfer) 908 { 909 unsigned int dma_size; 910 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 911 912 if (spi->cfg->has_fifo) 913 dma_size = spi->fifo_size; 914 else 915 dma_size = SPI_DMA_MIN_BYTES; 916 917 dev_dbg(spi->dev, "%s: %s\n", __func__, 918 (transfer->len > dma_size) ? "true" : "false"); 919 920 return (transfer->len > dma_size); 921 } 922 923 /** 924 * stm32fx_spi_irq_event - Interrupt handler for SPI controller events 925 * @irq: interrupt line 926 * @dev_id: SPI controller ctrl interface 927 */ 928 static irqreturn_t stm32fx_spi_irq_event(int irq, void *dev_id) 929 { 930 struct spi_controller *ctrl = dev_id; 931 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 932 u32 sr, mask = 0; 933 bool end = false; 934 935 spin_lock(&spi->lock); 936 937 sr = readl_relaxed(spi->base + STM32FX_SPI_SR); 938 /* 939 * BSY flag is not handled in interrupt but it is normal behavior when 940 * this flag is set. 941 */ 942 sr &= ~STM32FX_SPI_SR_BSY; 943 944 if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX || 945 spi->cur_comm == SPI_3WIRE_TX)) { 946 /* OVR flag shouldn't be handled for TX only mode */ 947 sr &= ~(STM32FX_SPI_SR_OVR | STM32FX_SPI_SR_RXNE); 948 mask |= STM32FX_SPI_SR_TXE; 949 } 950 951 if (!spi->cur_usedma && (spi->cur_comm == SPI_FULL_DUPLEX || 952 spi->cur_comm == SPI_SIMPLEX_RX || 953 spi->cur_comm == SPI_3WIRE_RX)) { 954 /* TXE flag is set and is handled when RXNE flag occurs */ 955 sr &= ~STM32FX_SPI_SR_TXE; 956 mask |= STM32FX_SPI_SR_RXNE | STM32FX_SPI_SR_OVR; 957 } 958 959 if (!(sr & mask)) { 960 dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr); 961 spin_unlock(&spi->lock); 962 return IRQ_NONE; 963 } 964 965 if (sr & STM32FX_SPI_SR_OVR) { 966 dev_warn(spi->dev, "Overrun: received value discarded\n"); 967 968 /* Sequence to clear OVR flag */ 969 readl_relaxed(spi->base + STM32FX_SPI_DR); 970 readl_relaxed(spi->base + STM32FX_SPI_SR); 971 972 /* 973 * If overrun is detected, it means that something went wrong, 974 * so stop the current transfer. Transfer can wait for next 975 * RXNE but DR is already read and end never happens. 976 */ 977 end = true; 978 goto end_irq; 979 } 980 981 if (sr & STM32FX_SPI_SR_TXE) { 982 if (spi->tx_buf) 983 spi->cfg->write_tx(spi); 984 if (spi->tx_len == 0) 985 end = true; 986 } 987 988 if (sr & STM32FX_SPI_SR_RXNE) { 989 spi->cfg->read_rx(spi); 990 if (spi->rx_len == 0) 991 end = true; 992 else if (spi->tx_buf)/* Load data for discontinuous mode */ 993 spi->cfg->write_tx(spi); 994 } 995 996 end_irq: 997 if (end) { 998 /* Immediately disable interrupts to do not generate new one */ 999 stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, 1000 STM32FX_SPI_CR2_TXEIE | 1001 STM32FX_SPI_CR2_RXNEIE | 1002 STM32FX_SPI_CR2_ERRIE); 1003 spin_unlock(&spi->lock); 1004 return IRQ_WAKE_THREAD; 1005 } 1006 1007 spin_unlock(&spi->lock); 1008 return IRQ_HANDLED; 1009 } 1010 1011 /** 1012 * stm32fx_spi_irq_thread - Thread of interrupt handler for SPI controller 1013 * @irq: interrupt line 1014 * @dev_id: SPI controller interface 1015 */ 1016 static irqreturn_t stm32fx_spi_irq_thread(int irq, void *dev_id) 1017 { 1018 struct spi_controller *ctrl = dev_id; 1019 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 1020 1021 spi_finalize_current_transfer(ctrl); 1022 stm32fx_spi_disable(spi); 1023 1024 return IRQ_HANDLED; 1025 } 1026 1027 /** 1028 * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller 1029 * @irq: interrupt line 1030 * @dev_id: SPI controller interface 1031 */ 1032 static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id) 1033 { 1034 struct spi_controller *ctrl = dev_id; 1035 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 1036 u32 sr, ier, mask; 1037 unsigned long flags; 1038 bool end = false; 1039 1040 spin_lock_irqsave(&spi->lock, flags); 1041 1042 sr = readl_relaxed(spi->base + STM32H7_SPI_SR); 1043 ier = readl_relaxed(spi->base + STM32H7_SPI_IER); 1044 1045 mask = ier; 1046 /* 1047 * EOTIE enables irq from EOT, SUSP and TXC events. We need to set 1048 * SUSP to acknowledge it later. TXC is automatically cleared 1049 */ 1050 1051 mask |= STM32H7_SPI_SR_SUSP; 1052 /* 1053 * DXPIE is set in Full-Duplex, one IT will be raised if TXP and RXP 1054 * are set. So in case of Full-Duplex, need to poll TXP and RXP event. 1055 */ 1056 if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma) 1057 mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP; 1058 1059 if (!(sr & mask)) { 1060 dev_warn(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n", 1061 sr, ier); 1062 spin_unlock_irqrestore(&spi->lock, flags); 1063 return IRQ_NONE; 1064 } 1065 1066 if (sr & STM32H7_SPI_SR_SUSP) { 1067 static DEFINE_RATELIMIT_STATE(rs, 1068 DEFAULT_RATELIMIT_INTERVAL * 10, 1069 1); 1070 ratelimit_set_flags(&rs, RATELIMIT_MSG_ON_RELEASE); 1071 if (__ratelimit(&rs)) 1072 dev_dbg_ratelimited(spi->dev, "Communication suspended\n"); 1073 if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0))) 1074 stm32h7_spi_read_rxfifo(spi); 1075 /* 1076 * If communication is suspended while using DMA, it means 1077 * that something went wrong, so stop the current transfer 1078 */ 1079 if (spi->cur_usedma) 1080 end = true; 1081 } 1082 1083 if (sr & STM32H7_SPI_SR_MODF) { 1084 dev_warn(spi->dev, "Mode fault: transfer aborted\n"); 1085 end = true; 1086 } 1087 1088 if (sr & STM32H7_SPI_SR_OVR) { 1089 dev_err(spi->dev, "Overrun: RX data lost\n"); 1090 end = true; 1091 } 1092 1093 if (sr & STM32H7_SPI_SR_EOT) { 1094 if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0))) 1095 stm32h7_spi_read_rxfifo(spi); 1096 if (!spi->cur_usedma || 1097 (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX)) 1098 end = true; 1099 } 1100 1101 if (sr & STM32H7_SPI_SR_TXP) 1102 if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0))) 1103 stm32h7_spi_write_txfifo(spi); 1104 1105 if (sr & STM32H7_SPI_SR_RXP) 1106 if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0))) 1107 stm32h7_spi_read_rxfifo(spi); 1108 1109 writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR); 1110 1111 spin_unlock_irqrestore(&spi->lock, flags); 1112 1113 if (end) { 1114 stm32h7_spi_disable(spi); 1115 spi_finalize_current_transfer(ctrl); 1116 } 1117 1118 return IRQ_HANDLED; 1119 } 1120 1121 static int stm32_spi_optimize_message(struct spi_message *msg) 1122 { 1123 struct spi_controller *ctrl = msg->spi->controller; 1124 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 1125 1126 /* On STM32H7, messages should not exceed a maximum size set 1127 * later via the set_number_of_data function. In order to 1128 * ensure that, split large messages into several messages 1129 */ 1130 if (spi->cfg->set_number_of_data) 1131 return spi_split_transfers_maxwords(ctrl, msg, spi->t_size_max); 1132 1133 return 0; 1134 } 1135 1136 /** 1137 * stm32_spi_prepare_msg - set up the controller to transfer a single message 1138 * @ctrl: controller interface 1139 * @msg: pointer to spi message 1140 */ 1141 static int stm32_spi_prepare_msg(struct spi_controller *ctrl, 1142 struct spi_message *msg) 1143 { 1144 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 1145 struct spi_device *spi_dev = msg->spi; 1146 struct device_node *np = spi_dev->dev.of_node; 1147 unsigned long flags; 1148 u32 clrb = 0, setb = 0; 1149 1150 /* SPI target device may need time between data frames */ 1151 spi->cur_midi = 0; 1152 if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi)) 1153 dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi); 1154 1155 if (spi_dev->mode & SPI_CPOL) 1156 setb |= spi->cfg->regs->cpol.mask; 1157 else 1158 clrb |= spi->cfg->regs->cpol.mask; 1159 1160 if (spi_dev->mode & SPI_CPHA) 1161 setb |= spi->cfg->regs->cpha.mask; 1162 else 1163 clrb |= spi->cfg->regs->cpha.mask; 1164 1165 if (spi_dev->mode & SPI_LSB_FIRST) 1166 setb |= spi->cfg->regs->lsb_first.mask; 1167 else 1168 clrb |= spi->cfg->regs->lsb_first.mask; 1169 1170 if (STM32_SPI_DEVICE_MODE(spi) && spi_dev->mode & SPI_CS_HIGH) 1171 setb |= spi->cfg->regs->cs_high.mask; 1172 else 1173 clrb |= spi->cfg->regs->cs_high.mask; 1174 1175 dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n", 1176 !!(spi_dev->mode & SPI_CPOL), 1177 !!(spi_dev->mode & SPI_CPHA), 1178 !!(spi_dev->mode & SPI_LSB_FIRST), 1179 !!(spi_dev->mode & SPI_CS_HIGH)); 1180 1181 spin_lock_irqsave(&spi->lock, flags); 1182 1183 /* CPOL, CPHA and LSB FIRST bits have common register */ 1184 if (clrb || setb) 1185 writel_relaxed( 1186 (readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) & 1187 ~clrb) | setb, 1188 spi->base + spi->cfg->regs->cpol.reg); 1189 1190 spin_unlock_irqrestore(&spi->lock, flags); 1191 1192 return 0; 1193 } 1194 1195 /** 1196 * stm32fx_spi_dma_tx_cb - dma callback 1197 * @data: pointer to the spi controller data structure 1198 * 1199 * DMA callback is called when the transfer is complete for DMA TX channel. 1200 */ 1201 static void stm32fx_spi_dma_tx_cb(void *data) 1202 { 1203 struct stm32_spi *spi = data; 1204 1205 if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) { 1206 spi_finalize_current_transfer(spi->ctrl); 1207 stm32fx_spi_disable(spi); 1208 } 1209 } 1210 1211 /** 1212 * stm32_spi_dma_rx_cb - dma callback 1213 * @data: pointer to the spi controller data structure 1214 * 1215 * DMA callback is called when the transfer is complete for DMA RX channel. 1216 */ 1217 static void stm32_spi_dma_rx_cb(void *data) 1218 { 1219 struct stm32_spi *spi = data; 1220 1221 spi_finalize_current_transfer(spi->ctrl); 1222 spi->cfg->disable(spi); 1223 } 1224 1225 /** 1226 * stm32_spi_dma_config - configure dma slave channel depending on current 1227 * transfer bits_per_word. 1228 * @spi: pointer to the spi controller data structure 1229 * @dma_chan: pointer to the DMA channel 1230 * @dma_conf: pointer to the dma_slave_config structure 1231 * @dir: direction of the dma transfer 1232 */ 1233 static void stm32_spi_dma_config(struct stm32_spi *spi, 1234 struct dma_chan *dma_chan, 1235 struct dma_slave_config *dma_conf, 1236 enum dma_transfer_direction dir) 1237 { 1238 enum dma_slave_buswidth buswidth; 1239 struct dma_slave_caps caps; 1240 u32 maxburst = 1; 1241 int ret; 1242 1243 if (spi->cur_bpw <= 8) 1244 buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE; 1245 else if (spi->cur_bpw <= 16) 1246 buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES; 1247 else 1248 buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES; 1249 1250 /* Valid for DMA Half or Full Fifo threshold */ 1251 if (!spi->cfg->prevent_dma_burst && spi->cfg->has_fifo && spi->cur_fthlv != 2) 1252 maxburst = spi->cur_fthlv; 1253 1254 /* Get the DMA channel caps, and adjust maxburst if possible */ 1255 ret = dma_get_slave_caps(dma_chan, &caps); 1256 if (!ret) 1257 maxburst = min(maxburst, caps.max_burst); 1258 1259 memset(dma_conf, 0, sizeof(struct dma_slave_config)); 1260 dma_conf->direction = dir; 1261 if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */ 1262 dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg; 1263 dma_conf->src_addr_width = buswidth; 1264 dma_conf->src_maxburst = maxburst; 1265 1266 dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n", 1267 buswidth, maxburst); 1268 } else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */ 1269 dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg; 1270 dma_conf->dst_addr_width = buswidth; 1271 dma_conf->dst_maxburst = maxburst; 1272 1273 dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n", 1274 buswidth, maxburst); 1275 } 1276 } 1277 1278 /** 1279 * stm32fx_spi_transfer_one_irq - transfer a single spi_transfer using 1280 * interrupts 1281 * @spi: pointer to the spi controller data structure 1282 * 1283 * It must returns 0 if the transfer is finished or 1 if the transfer is still 1284 * in progress. 1285 */ 1286 static int stm32fx_spi_transfer_one_irq(struct stm32_spi *spi) 1287 { 1288 unsigned long flags; 1289 u32 cr2 = 0; 1290 1291 /* Enable the interrupts relative to the current communication mode */ 1292 if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) { 1293 cr2 |= STM32FX_SPI_CR2_TXEIE; 1294 } else if (spi->cur_comm == SPI_FULL_DUPLEX || 1295 spi->cur_comm == SPI_SIMPLEX_RX || 1296 spi->cur_comm == SPI_3WIRE_RX) { 1297 /* In transmit-only mode, the OVR flag is set in the SR register 1298 * since the received data are never read. Therefore set OVR 1299 * interrupt only when rx buffer is available. 1300 */ 1301 cr2 |= STM32FX_SPI_CR2_RXNEIE | STM32FX_SPI_CR2_ERRIE; 1302 } else { 1303 return -EINVAL; 1304 } 1305 1306 spin_lock_irqsave(&spi->lock, flags); 1307 1308 stm32_spi_set_bits(spi, STM32FX_SPI_CR2, cr2); 1309 1310 stm32_spi_enable(spi); 1311 1312 /* starting data transfer when buffer is loaded */ 1313 if (spi->tx_buf) 1314 spi->cfg->write_tx(spi); 1315 1316 spin_unlock_irqrestore(&spi->lock, flags); 1317 1318 return 1; 1319 } 1320 1321 /** 1322 * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using 1323 * interrupts 1324 * @spi: pointer to the spi controller data structure 1325 * 1326 * It must returns 0 if the transfer is finished or 1 if the transfer is still 1327 * in progress. 1328 */ 1329 static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi) 1330 { 1331 unsigned long flags; 1332 u32 ier = 0; 1333 1334 /* Enable the interrupts relative to the current communication mode */ 1335 if (spi->tx_buf && spi->rx_buf) /* Full Duplex */ 1336 ier |= STM32H7_SPI_IER_DXPIE; 1337 else if (spi->tx_buf) /* Half-Duplex TX dir or Simplex TX */ 1338 ier |= STM32H7_SPI_IER_TXPIE; 1339 else if (spi->rx_buf) /* Half-Duplex RX dir or Simplex RX */ 1340 ier |= STM32H7_SPI_IER_RXPIE; 1341 1342 /* Enable the interrupts relative to the end of transfer */ 1343 ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE | 1344 STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE; 1345 1346 spin_lock_irqsave(&spi->lock, flags); 1347 1348 stm32_spi_enable(spi); 1349 1350 /* Be sure to have data in fifo before starting data transfer */ 1351 if (spi->tx_buf) 1352 stm32h7_spi_write_txfifo(spi); 1353 1354 if (STM32_SPI_HOST_MODE(spi)) 1355 stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART); 1356 1357 writel_relaxed(ier, spi->base + STM32H7_SPI_IER); 1358 1359 spin_unlock_irqrestore(&spi->lock, flags); 1360 1361 return 1; 1362 } 1363 1364 /** 1365 * stm32fx_spi_transfer_one_dma_start - Set SPI driver registers to start 1366 * transfer using DMA 1367 * @spi: pointer to the spi controller data structure 1368 */ 1369 static void stm32fx_spi_transfer_one_dma_start(struct stm32_spi *spi) 1370 { 1371 /* In DMA mode end of transfer is handled by DMA TX or RX callback. */ 1372 if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX || 1373 spi->cur_comm == SPI_FULL_DUPLEX) { 1374 /* 1375 * In transmit-only mode, the OVR flag is set in the SR register 1376 * since the received data are never read. Therefore set OVR 1377 * interrupt only when rx buffer is available. 1378 */ 1379 stm32_spi_set_bits(spi, STM32FX_SPI_CR2, STM32FX_SPI_CR2_ERRIE); 1380 } 1381 1382 stm32_spi_enable(spi); 1383 } 1384 1385 /** 1386 * stm32f7_spi_transfer_one_dma_start - Set SPI driver registers to start 1387 * transfer using DMA 1388 * @spi: pointer to the spi controller data structure 1389 */ 1390 static void stm32f7_spi_transfer_one_dma_start(struct stm32_spi *spi) 1391 { 1392 /* Configure DMA request trigger threshold according to DMA width */ 1393 if (spi->cur_bpw <= 8) 1394 stm32_spi_set_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH); 1395 else 1396 stm32_spi_clr_bits(spi, STM32FX_SPI_CR2, STM32F7_SPI_CR2_FRXTH); 1397 1398 stm32fx_spi_transfer_one_dma_start(spi); 1399 } 1400 1401 /** 1402 * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start 1403 * transfer using DMA 1404 * @spi: pointer to the spi controller data structure 1405 */ 1406 static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi) 1407 { 1408 uint32_t ier = STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE; 1409 1410 /* Enable the interrupts */ 1411 if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) 1412 ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE; 1413 1414 stm32_spi_set_bits(spi, STM32H7_SPI_IER, ier); 1415 1416 stm32_spi_enable(spi); 1417 1418 if (STM32_SPI_HOST_MODE(spi)) 1419 stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART); 1420 } 1421 1422 /** 1423 * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA 1424 * @spi: pointer to the spi controller data structure 1425 * @xfer: pointer to the spi_transfer structure 1426 * 1427 * It must returns 0 if the transfer is finished or 1 if the transfer is still 1428 * in progress. 1429 */ 1430 static int stm32_spi_transfer_one_dma(struct stm32_spi *spi, 1431 struct spi_transfer *xfer) 1432 { 1433 struct dma_slave_config tx_dma_conf, rx_dma_conf; 1434 struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc; 1435 unsigned long flags; 1436 1437 spin_lock_irqsave(&spi->lock, flags); 1438 1439 rx_dma_desc = NULL; 1440 if (spi->rx_buf && spi->dma_rx) { 1441 stm32_spi_dma_config(spi, spi->dma_rx, &rx_dma_conf, DMA_DEV_TO_MEM); 1442 dmaengine_slave_config(spi->dma_rx, &rx_dma_conf); 1443 1444 /* Enable Rx DMA request */ 1445 stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg, 1446 spi->cfg->regs->dma_rx_en.mask); 1447 1448 rx_dma_desc = dmaengine_prep_slave_sg( 1449 spi->dma_rx, xfer->rx_sg.sgl, 1450 xfer->rx_sg.nents, 1451 rx_dma_conf.direction, 1452 DMA_PREP_INTERRUPT); 1453 } 1454 1455 tx_dma_desc = NULL; 1456 if (spi->tx_buf && spi->dma_tx) { 1457 stm32_spi_dma_config(spi, spi->dma_tx, &tx_dma_conf, DMA_MEM_TO_DEV); 1458 dmaengine_slave_config(spi->dma_tx, &tx_dma_conf); 1459 1460 tx_dma_desc = dmaengine_prep_slave_sg( 1461 spi->dma_tx, xfer->tx_sg.sgl, 1462 xfer->tx_sg.nents, 1463 tx_dma_conf.direction, 1464 DMA_PREP_INTERRUPT); 1465 } 1466 1467 if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) || 1468 (spi->rx_buf && spi->dma_rx && !rx_dma_desc)) 1469 goto dma_desc_error; 1470 1471 if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc)) 1472 goto dma_desc_error; 1473 1474 if (rx_dma_desc) { 1475 rx_dma_desc->callback = spi->cfg->dma_rx_cb; 1476 rx_dma_desc->callback_param = spi; 1477 1478 if (dma_submit_error(dmaengine_submit(rx_dma_desc))) { 1479 dev_err(spi->dev, "Rx DMA submit failed\n"); 1480 goto dma_desc_error; 1481 } 1482 /* Enable Rx DMA channel */ 1483 dma_async_issue_pending(spi->dma_rx); 1484 } 1485 1486 if (tx_dma_desc) { 1487 if (spi->cur_comm == SPI_SIMPLEX_TX || 1488 spi->cur_comm == SPI_3WIRE_TX) { 1489 tx_dma_desc->callback = spi->cfg->dma_tx_cb; 1490 tx_dma_desc->callback_param = spi; 1491 } 1492 1493 if (dma_submit_error(dmaengine_submit(tx_dma_desc))) { 1494 dev_err(spi->dev, "Tx DMA submit failed\n"); 1495 goto dma_submit_error; 1496 } 1497 /* Enable Tx DMA channel */ 1498 dma_async_issue_pending(spi->dma_tx); 1499 1500 /* Enable Tx DMA request */ 1501 stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg, 1502 spi->cfg->regs->dma_tx_en.mask); 1503 } 1504 1505 spi->cfg->transfer_one_dma_start(spi); 1506 1507 spin_unlock_irqrestore(&spi->lock, flags); 1508 1509 return 1; 1510 1511 dma_submit_error: 1512 if (spi->dma_rx) 1513 dmaengine_terminate_sync(spi->dma_rx); 1514 1515 dma_desc_error: 1516 stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg, 1517 spi->cfg->regs->dma_rx_en.mask); 1518 1519 spin_unlock_irqrestore(&spi->lock, flags); 1520 1521 dev_info(spi->dev, "DMA issue: fall back to irq transfer\n"); 1522 1523 spi->cur_usedma = false; 1524 return spi->cfg->transfer_one_irq(spi); 1525 } 1526 1527 /** 1528 * stm32f4_spi_set_bpw - Configure bits per word 1529 * @spi: pointer to the spi controller data structure 1530 */ 1531 static void stm32f4_spi_set_bpw(struct stm32_spi *spi) 1532 { 1533 if (spi->cur_bpw == 16) 1534 stm32_spi_set_bits(spi, STM32FX_SPI_CR1, STM32F4_SPI_CR1_DFF); 1535 else 1536 stm32_spi_clr_bits(spi, STM32FX_SPI_CR1, STM32F4_SPI_CR1_DFF); 1537 } 1538 1539 /** 1540 * stm32f7_spi_set_bpw - Configure bits per word 1541 * @spi: pointer to the spi controller data structure 1542 */ 1543 static void stm32f7_spi_set_bpw(struct stm32_spi *spi) 1544 { 1545 u32 bpw; 1546 u32 cr2_clrb = 0, cr2_setb = 0; 1547 1548 bpw = spi->cur_bpw - 1; 1549 1550 cr2_clrb |= STM32F7_SPI_CR2_DS; 1551 cr2_setb |= FIELD_PREP(STM32F7_SPI_CR2_DS, bpw); 1552 1553 if (spi->rx_len >= sizeof(u16)) 1554 cr2_clrb |= STM32F7_SPI_CR2_FRXTH; 1555 else 1556 cr2_setb |= STM32F7_SPI_CR2_FRXTH; 1557 1558 writel_relaxed( 1559 (readl_relaxed(spi->base + STM32FX_SPI_CR2) & 1560 ~cr2_clrb) | cr2_setb, 1561 spi->base + STM32FX_SPI_CR2); 1562 } 1563 1564 /** 1565 * stm32h7_spi_set_bpw - configure bits per word 1566 * @spi: pointer to the spi controller data structure 1567 */ 1568 static void stm32h7_spi_set_bpw(struct stm32_spi *spi) 1569 { 1570 u32 bpw, fthlv; 1571 u32 cfg1_clrb = 0, cfg1_setb = 0; 1572 1573 bpw = spi->cur_bpw - 1; 1574 1575 cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE; 1576 cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_DSIZE, bpw); 1577 1578 spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen); 1579 fthlv = spi->cur_fthlv - 1; 1580 1581 cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV; 1582 cfg1_setb |= FIELD_PREP(STM32H7_SPI_CFG1_FTHLV, fthlv); 1583 1584 writel_relaxed( 1585 (readl_relaxed(spi->base + STM32H7_SPI_CFG1) & 1586 ~cfg1_clrb) | cfg1_setb, 1587 spi->base + STM32H7_SPI_CFG1); 1588 } 1589 1590 /** 1591 * stm32_spi_set_mbr - Configure baud rate divisor in host mode 1592 * @spi: pointer to the spi controller data structure 1593 * @mbrdiv: baud rate divisor value 1594 */ 1595 static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv) 1596 { 1597 u32 clrb = 0, setb = 0; 1598 1599 clrb |= spi->cfg->regs->br.mask; 1600 setb |= (mbrdiv << spi->cfg->regs->br.shift) & spi->cfg->regs->br.mask; 1601 1602 writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) & 1603 ~clrb) | setb, 1604 spi->base + spi->cfg->regs->br.reg); 1605 } 1606 1607 /** 1608 * stm32_spi_communication_type - return transfer communication type 1609 * @spi_dev: pointer to the spi device 1610 * @transfer: pointer to spi transfer 1611 */ 1612 static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev, 1613 struct spi_transfer *transfer) 1614 { 1615 unsigned int type = SPI_FULL_DUPLEX; 1616 1617 if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */ 1618 /* 1619 * SPI_3WIRE and xfer->tx_buf != NULL and xfer->rx_buf != NULL 1620 * is forbidden and unvalidated by SPI subsystem so depending 1621 * on the valid buffer, we can determine the direction of the 1622 * transfer. 1623 */ 1624 if (!transfer->tx_buf) 1625 type = SPI_3WIRE_RX; 1626 else 1627 type = SPI_3WIRE_TX; 1628 } else { 1629 if (!transfer->tx_buf) 1630 type = SPI_SIMPLEX_RX; 1631 else if (!transfer->rx_buf) 1632 type = SPI_SIMPLEX_TX; 1633 } 1634 1635 return type; 1636 } 1637 1638 /** 1639 * stm32fx_spi_set_mode - configure communication mode 1640 * @spi: pointer to the spi controller data structure 1641 * @comm_type: type of communication to configure 1642 */ 1643 static int stm32fx_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type) 1644 { 1645 if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) { 1646 stm32_spi_set_bits(spi, STM32FX_SPI_CR1, 1647 STM32FX_SPI_CR1_BIDIMODE | 1648 STM32FX_SPI_CR1_BIDIOE); 1649 } else if (comm_type == SPI_FULL_DUPLEX || 1650 comm_type == SPI_SIMPLEX_RX) { 1651 stm32_spi_clr_bits(spi, STM32FX_SPI_CR1, 1652 STM32FX_SPI_CR1_BIDIMODE | 1653 STM32FX_SPI_CR1_BIDIOE); 1654 } else if (comm_type == SPI_3WIRE_RX) { 1655 stm32_spi_set_bits(spi, STM32FX_SPI_CR1, 1656 STM32FX_SPI_CR1_BIDIMODE); 1657 stm32_spi_clr_bits(spi, STM32FX_SPI_CR1, 1658 STM32FX_SPI_CR1_BIDIOE); 1659 } else { 1660 return -EINVAL; 1661 } 1662 1663 return 0; 1664 } 1665 1666 /** 1667 * stm32h7_spi_set_mode - configure communication mode 1668 * @spi: pointer to the spi controller data structure 1669 * @comm_type: type of communication to configure 1670 */ 1671 static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type) 1672 { 1673 u32 mode; 1674 u32 cfg2_clrb = 0, cfg2_setb = 0; 1675 1676 if (comm_type == SPI_3WIRE_RX) { 1677 mode = STM32H7_SPI_HALF_DUPLEX; 1678 stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR); 1679 } else if (comm_type == SPI_3WIRE_TX) { 1680 mode = STM32H7_SPI_HALF_DUPLEX; 1681 stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR); 1682 } else if (comm_type == SPI_SIMPLEX_RX) { 1683 mode = STM32H7_SPI_SIMPLEX_RX; 1684 } else if (comm_type == SPI_SIMPLEX_TX) { 1685 mode = STM32H7_SPI_SIMPLEX_TX; 1686 } else { 1687 mode = STM32H7_SPI_FULL_DUPLEX; 1688 } 1689 1690 cfg2_clrb |= STM32H7_SPI_CFG2_COMM; 1691 cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_COMM, mode); 1692 1693 writel_relaxed( 1694 (readl_relaxed(spi->base + STM32H7_SPI_CFG2) & 1695 ~cfg2_clrb) | cfg2_setb, 1696 spi->base + STM32H7_SPI_CFG2); 1697 1698 return 0; 1699 } 1700 1701 /** 1702 * stm32h7_spi_data_idleness - configure minimum time delay inserted between two 1703 * consecutive data frames in host mode 1704 * @spi: pointer to the spi controller data structure 1705 * @len: transfer len 1706 */ 1707 static void stm32h7_spi_data_idleness(struct stm32_spi *spi, u32 len) 1708 { 1709 u32 cfg2_clrb = 0, cfg2_setb = 0; 1710 1711 cfg2_clrb |= STM32H7_SPI_CFG2_MIDI; 1712 if ((len > 1) && (spi->cur_midi > 0)) { 1713 u32 sck_period_ns = DIV_ROUND_UP(NSEC_PER_SEC, spi->cur_speed); 1714 u32 midi = min_t(u32, 1715 DIV_ROUND_UP(spi->cur_midi, sck_period_ns), 1716 FIELD_GET(STM32H7_SPI_CFG2_MIDI, 1717 STM32H7_SPI_CFG2_MIDI)); 1718 1719 1720 dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n", 1721 sck_period_ns, midi, midi * sck_period_ns); 1722 cfg2_setb |= FIELD_PREP(STM32H7_SPI_CFG2_MIDI, midi); 1723 } 1724 1725 writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) & 1726 ~cfg2_clrb) | cfg2_setb, 1727 spi->base + STM32H7_SPI_CFG2); 1728 } 1729 1730 /** 1731 * stm32h7_spi_number_of_data - configure number of data at current transfer 1732 * @spi: pointer to the spi controller data structure 1733 * @nb_words: transfer length (in words) 1734 */ 1735 static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words) 1736 { 1737 if (nb_words <= spi->t_size_max) { 1738 writel_relaxed(FIELD_PREP(STM32H7_SPI_CR2_TSIZE, nb_words), 1739 spi->base + STM32H7_SPI_CR2); 1740 } else { 1741 return -EMSGSIZE; 1742 } 1743 1744 return 0; 1745 } 1746 1747 /** 1748 * stm32_spi_transfer_one_setup - common setup to transfer a single 1749 * spi_transfer either using DMA or 1750 * interrupts. 1751 * @spi: pointer to the spi controller data structure 1752 * @spi_dev: pointer to the spi device 1753 * @transfer: pointer to spi transfer 1754 */ 1755 static int stm32_spi_transfer_one_setup(struct stm32_spi *spi, 1756 struct spi_device *spi_dev, 1757 struct spi_transfer *transfer) 1758 { 1759 unsigned long flags; 1760 unsigned int comm_type; 1761 int nb_words, ret = 0; 1762 int mbr; 1763 1764 spin_lock_irqsave(&spi->lock, flags); 1765 1766 spi->cur_xferlen = transfer->len; 1767 1768 spi->cur_bpw = transfer->bits_per_word; 1769 spi->cfg->set_bpw(spi); 1770 1771 /* Update spi->cur_speed with real clock speed */ 1772 if (STM32_SPI_HOST_MODE(spi)) { 1773 mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz, 1774 spi->cfg->baud_rate_div_min, 1775 spi->cfg->baud_rate_div_max); 1776 if (mbr < 0) { 1777 ret = mbr; 1778 goto out; 1779 } 1780 1781 transfer->speed_hz = spi->cur_speed; 1782 stm32_spi_set_mbr(spi, mbr); 1783 } 1784 1785 comm_type = stm32_spi_communication_type(spi_dev, transfer); 1786 ret = spi->cfg->set_mode(spi, comm_type); 1787 if (ret < 0) 1788 goto out; 1789 1790 spi->cur_comm = comm_type; 1791 1792 if (STM32_SPI_HOST_MODE(spi) && spi->cfg->set_data_idleness) 1793 spi->cfg->set_data_idleness(spi, transfer->len); 1794 1795 if (spi->cur_bpw <= 8) 1796 nb_words = transfer->len; 1797 else if (spi->cur_bpw <= 16) 1798 nb_words = DIV_ROUND_UP(transfer->len * 8, 16); 1799 else 1800 nb_words = DIV_ROUND_UP(transfer->len * 8, 32); 1801 1802 if (spi->cfg->set_number_of_data) { 1803 ret = spi->cfg->set_number_of_data(spi, nb_words); 1804 if (ret < 0) 1805 goto out; 1806 } 1807 1808 dev_dbg(spi->dev, "transfer communication mode set to %d\n", 1809 spi->cur_comm); 1810 dev_dbg(spi->dev, 1811 "data frame of %d-bit, data packet of %d data frames\n", 1812 spi->cur_bpw, spi->cur_fthlv); 1813 if (STM32_SPI_HOST_MODE(spi)) 1814 dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed); 1815 dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n", 1816 spi->cur_xferlen, nb_words); 1817 dev_dbg(spi->dev, "dma %s\n", 1818 (spi->cur_usedma) ? "enabled" : "disabled"); 1819 1820 out: 1821 spin_unlock_irqrestore(&spi->lock, flags); 1822 1823 return ret; 1824 } 1825 1826 /** 1827 * stm32_spi_transfer_one - transfer a single spi_transfer 1828 * @ctrl: controller interface 1829 * @spi_dev: pointer to the spi device 1830 * @transfer: pointer to spi transfer 1831 * 1832 * It must return 0 if the transfer is finished or 1 if the transfer is still 1833 * in progress. 1834 */ 1835 static int stm32_spi_transfer_one(struct spi_controller *ctrl, 1836 struct spi_device *spi_dev, 1837 struct spi_transfer *transfer) 1838 { 1839 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 1840 int ret; 1841 1842 spi->tx_buf = transfer->tx_buf; 1843 spi->rx_buf = transfer->rx_buf; 1844 spi->tx_len = spi->tx_buf ? transfer->len : 0; 1845 spi->rx_len = spi->rx_buf ? transfer->len : 0; 1846 1847 spi->cur_usedma = (ctrl->can_dma && 1848 ctrl->can_dma(ctrl, spi_dev, transfer)); 1849 1850 ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer); 1851 if (ret) { 1852 dev_err(spi->dev, "SPI transfer setup failed\n"); 1853 return ret; 1854 } 1855 1856 if (spi->cur_usedma) 1857 return stm32_spi_transfer_one_dma(spi, transfer); 1858 else 1859 return spi->cfg->transfer_one_irq(spi); 1860 } 1861 1862 /** 1863 * stm32_spi_unprepare_msg - relax the hardware 1864 * @ctrl: controller interface 1865 * @msg: pointer to the spi message 1866 */ 1867 static int stm32_spi_unprepare_msg(struct spi_controller *ctrl, 1868 struct spi_message *msg) 1869 { 1870 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 1871 1872 spi->cfg->disable(spi); 1873 1874 return 0; 1875 } 1876 1877 /** 1878 * stm32fx_spi_config - Configure SPI controller as SPI host 1879 * @spi: pointer to the spi controller data structure 1880 */ 1881 static int stm32fx_spi_config(struct stm32_spi *spi) 1882 { 1883 unsigned long flags; 1884 1885 spin_lock_irqsave(&spi->lock, flags); 1886 1887 /* Ensure I2SMOD bit is kept cleared */ 1888 stm32_spi_clr_bits(spi, STM32FX_SPI_I2SCFGR, 1889 STM32FX_SPI_I2SCFGR_I2SMOD); 1890 1891 /* 1892 * - SS input value high 1893 * - transmitter half duplex direction 1894 * - Set the host mode (default Motorola mode) 1895 * - Consider 1 host/n targets configuration and 1896 * SS input value is determined by the SSI bit 1897 */ 1898 stm32_spi_set_bits(spi, STM32FX_SPI_CR1, STM32FX_SPI_CR1_SSI | 1899 STM32FX_SPI_CR1_BIDIOE | 1900 STM32FX_SPI_CR1_MSTR | 1901 STM32FX_SPI_CR1_SSM); 1902 1903 spin_unlock_irqrestore(&spi->lock, flags); 1904 1905 return 0; 1906 } 1907 1908 /** 1909 * stm32h7_spi_config - Configure SPI controller 1910 * @spi: pointer to the spi controller data structure 1911 */ 1912 static int stm32h7_spi_config(struct stm32_spi *spi) 1913 { 1914 unsigned long flags; 1915 u32 cr1 = 0, cfg2 = 0; 1916 1917 spin_lock_irqsave(&spi->lock, flags); 1918 1919 /* Ensure I2SMOD bit is kept cleared */ 1920 stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR, 1921 STM32H7_SPI_I2SCFGR_I2SMOD); 1922 1923 if (STM32_SPI_DEVICE_MODE(spi)) { 1924 /* Use native device select */ 1925 cfg2 &= ~STM32H7_SPI_CFG2_SSM; 1926 } else { 1927 /* 1928 * - Transmitter half duplex direction 1929 * - Automatic communication suspend when RX-Fifo is full 1930 * - SS input value high 1931 */ 1932 cr1 |= STM32H7_SPI_CR1_HDDIR | STM32H7_SPI_CR1_MASRX | STM32H7_SPI_CR1_SSI; 1933 1934 /* 1935 * - Set the host mode (default Motorola mode) 1936 * - Consider 1 host/n devices configuration and 1937 * SS input value is determined by the SSI bit 1938 * - keep control of all associated GPIOs 1939 */ 1940 cfg2 |= STM32H7_SPI_CFG2_MASTER | STM32H7_SPI_CFG2_SSM | STM32H7_SPI_CFG2_AFCNTR; 1941 } 1942 1943 stm32_spi_set_bits(spi, STM32H7_SPI_CR1, cr1); 1944 stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, cfg2); 1945 1946 spin_unlock_irqrestore(&spi->lock, flags); 1947 1948 return 0; 1949 } 1950 1951 static const struct stm32_spi_cfg stm32f4_spi_cfg = { 1952 .regs = &stm32fx_spi_regspec, 1953 .get_bpw_mask = stm32f4_spi_get_bpw_mask, 1954 .disable = stm32fx_spi_disable, 1955 .config = stm32fx_spi_config, 1956 .set_bpw = stm32f4_spi_set_bpw, 1957 .set_mode = stm32fx_spi_set_mode, 1958 .write_tx = stm32f4_spi_write_tx, 1959 .read_rx = stm32f4_spi_read_rx, 1960 .transfer_one_dma_start = stm32fx_spi_transfer_one_dma_start, 1961 .dma_tx_cb = stm32fx_spi_dma_tx_cb, 1962 .dma_rx_cb = stm32_spi_dma_rx_cb, 1963 .transfer_one_irq = stm32fx_spi_transfer_one_irq, 1964 .irq_handler_event = stm32fx_spi_irq_event, 1965 .irq_handler_thread = stm32fx_spi_irq_thread, 1966 .baud_rate_div_min = STM32FX_SPI_BR_DIV_MIN, 1967 .baud_rate_div_max = STM32FX_SPI_BR_DIV_MAX, 1968 .has_fifo = false, 1969 .has_device_mode = false, 1970 .flags = SPI_CONTROLLER_MUST_TX, 1971 }; 1972 1973 static const struct stm32_spi_cfg stm32f7_spi_cfg = { 1974 .regs = &stm32fx_spi_regspec, 1975 .get_bpw_mask = stm32f7_spi_get_bpw_mask, 1976 .disable = stm32fx_spi_disable, 1977 .config = stm32fx_spi_config, 1978 .set_bpw = stm32f7_spi_set_bpw, 1979 .set_mode = stm32fx_spi_set_mode, 1980 .write_tx = stm32f7_spi_write_tx, 1981 .read_rx = stm32f7_spi_read_rx, 1982 .transfer_one_dma_start = stm32f7_spi_transfer_one_dma_start, 1983 .dma_tx_cb = stm32fx_spi_dma_tx_cb, 1984 .dma_rx_cb = stm32_spi_dma_rx_cb, 1985 .transfer_one_irq = stm32fx_spi_transfer_one_irq, 1986 .irq_handler_event = stm32fx_spi_irq_event, 1987 .irq_handler_thread = stm32fx_spi_irq_thread, 1988 .baud_rate_div_min = STM32FX_SPI_BR_DIV_MIN, 1989 .baud_rate_div_max = STM32FX_SPI_BR_DIV_MAX, 1990 .has_fifo = false, 1991 .flags = SPI_CONTROLLER_MUST_TX, 1992 }; 1993 1994 static const struct stm32_spi_cfg stm32h7_spi_cfg = { 1995 .regs = &stm32h7_spi_regspec, 1996 .get_fifo_size = stm32h7_spi_get_fifo_size, 1997 .get_bpw_mask = stm32h7_spi_get_bpw_mask, 1998 .disable = stm32h7_spi_disable, 1999 .config = stm32h7_spi_config, 2000 .set_bpw = stm32h7_spi_set_bpw, 2001 .set_mode = stm32h7_spi_set_mode, 2002 .set_data_idleness = stm32h7_spi_data_idleness, 2003 .set_number_of_data = stm32h7_spi_number_of_data, 2004 .write_tx = stm32h7_spi_write_txfifo, 2005 .read_rx = stm32h7_spi_read_rxfifo, 2006 .transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start, 2007 .dma_rx_cb = stm32_spi_dma_rx_cb, 2008 /* 2009 * dma_tx_cb is not necessary since in case of TX, dma is followed by 2010 * SPI access hence handling is performed within the SPI interrupt 2011 */ 2012 .transfer_one_irq = stm32h7_spi_transfer_one_irq, 2013 .irq_handler_thread = stm32h7_spi_irq_thread, 2014 .baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN, 2015 .baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX, 2016 .has_fifo = true, 2017 .has_device_mode = true, 2018 }; 2019 2020 /* 2021 * STM32MP2 is compatible with the STM32H7 except: 2022 * - enforce the DMA maxburst value to 1 2023 * - spi8 have limited feature set (TSIZE_MAX = 1024, BPW of 8 OR 16) 2024 */ 2025 static const struct stm32_spi_cfg stm32mp25_spi_cfg = { 2026 .regs = &stm32mp25_spi_regspec, 2027 .get_fifo_size = stm32h7_spi_get_fifo_size, 2028 .get_bpw_mask = stm32mp25_spi_get_bpw_mask, 2029 .disable = stm32h7_spi_disable, 2030 .config = stm32h7_spi_config, 2031 .set_bpw = stm32h7_spi_set_bpw, 2032 .set_mode = stm32h7_spi_set_mode, 2033 .set_data_idleness = stm32h7_spi_data_idleness, 2034 .set_number_of_data = stm32h7_spi_number_of_data, 2035 .transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start, 2036 .dma_rx_cb = stm32_spi_dma_rx_cb, 2037 /* 2038 * dma_tx_cb is not necessary since in case of TX, dma is followed by 2039 * SPI access hence handling is performed within the SPI interrupt 2040 */ 2041 .transfer_one_irq = stm32h7_spi_transfer_one_irq, 2042 .irq_handler_thread = stm32h7_spi_irq_thread, 2043 .baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN, 2044 .baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX, 2045 .has_fifo = true, 2046 .prevent_dma_burst = true, 2047 }; 2048 2049 static const struct of_device_id stm32_spi_of_match[] = { 2050 { .compatible = "st,stm32mp25-spi", .data = (void *)&stm32mp25_spi_cfg }, 2051 { .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg }, 2052 { .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg }, 2053 { .compatible = "st,stm32f7-spi", .data = (void *)&stm32f7_spi_cfg }, 2054 {}, 2055 }; 2056 MODULE_DEVICE_TABLE(of, stm32_spi_of_match); 2057 2058 static int stm32h7_spi_device_abort(struct spi_controller *ctrl) 2059 { 2060 spi_finalize_current_transfer(ctrl); 2061 return 0; 2062 } 2063 2064 static int stm32_spi_probe(struct platform_device *pdev) 2065 { 2066 struct spi_controller *ctrl; 2067 struct stm32_spi *spi; 2068 struct resource *res; 2069 struct reset_control *rst; 2070 struct device_node *np = pdev->dev.of_node; 2071 bool device_mode; 2072 int ret; 2073 const struct stm32_spi_cfg *cfg = of_device_get_match_data(&pdev->dev); 2074 2075 device_mode = of_property_read_bool(np, "spi-slave"); 2076 if (!cfg->has_device_mode && device_mode) { 2077 dev_err(&pdev->dev, "spi-slave not supported\n"); 2078 return -EPERM; 2079 } 2080 2081 if (device_mode) 2082 ctrl = devm_spi_alloc_target(&pdev->dev, sizeof(struct stm32_spi)); 2083 else 2084 ctrl = devm_spi_alloc_host(&pdev->dev, sizeof(struct stm32_spi)); 2085 if (!ctrl) { 2086 dev_err(&pdev->dev, "spi controller allocation failed\n"); 2087 return -ENOMEM; 2088 } 2089 platform_set_drvdata(pdev, ctrl); 2090 2091 spi = spi_controller_get_devdata(ctrl); 2092 spi->dev = &pdev->dev; 2093 spi->ctrl = ctrl; 2094 spi->device_mode = device_mode; 2095 spin_lock_init(&spi->lock); 2096 2097 spi->cfg = cfg; 2098 2099 spi->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res); 2100 if (IS_ERR(spi->base)) 2101 return PTR_ERR(spi->base); 2102 2103 spi->phys_addr = (dma_addr_t)res->start; 2104 2105 spi->irq = platform_get_irq(pdev, 0); 2106 if (spi->irq <= 0) 2107 return spi->irq; 2108 2109 ret = devm_request_threaded_irq(&pdev->dev, spi->irq, 2110 spi->cfg->irq_handler_event, 2111 spi->cfg->irq_handler_thread, 2112 IRQF_ONESHOT, pdev->name, ctrl); 2113 if (ret) { 2114 dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq, 2115 ret); 2116 return ret; 2117 } 2118 2119 spi->clk = devm_clk_get(&pdev->dev, NULL); 2120 if (IS_ERR(spi->clk)) { 2121 ret = PTR_ERR(spi->clk); 2122 dev_err(&pdev->dev, "clk get failed: %d\n", ret); 2123 return ret; 2124 } 2125 2126 ret = clk_prepare_enable(spi->clk); 2127 if (ret) { 2128 dev_err(&pdev->dev, "clk enable failed: %d\n", ret); 2129 return ret; 2130 } 2131 spi->clk_rate = clk_get_rate(spi->clk); 2132 if (!spi->clk_rate) { 2133 dev_err(&pdev->dev, "clk rate = 0\n"); 2134 ret = -EINVAL; 2135 goto err_clk_disable; 2136 } 2137 2138 rst = devm_reset_control_get_optional_exclusive(&pdev->dev, NULL); 2139 if (rst) { 2140 if (IS_ERR(rst)) { 2141 ret = dev_err_probe(&pdev->dev, PTR_ERR(rst), 2142 "failed to get reset\n"); 2143 goto err_clk_disable; 2144 } 2145 2146 reset_control_assert(rst); 2147 udelay(2); 2148 reset_control_deassert(rst); 2149 } 2150 2151 if (spi->cfg->has_fifo) 2152 spi->fifo_size = spi->cfg->get_fifo_size(spi); 2153 2154 spi->feature_set = STM32_SPI_FEATURE_FULL; 2155 if (spi->cfg->regs->fullcfg.reg) { 2156 spi->feature_set = 2157 FIELD_GET(STM32MP25_SPI_HWCFGR1_FULLCFG, 2158 readl_relaxed(spi->base + spi->cfg->regs->fullcfg.reg)); 2159 2160 dev_dbg(spi->dev, "%s feature set\n", 2161 spi->feature_set == STM32_SPI_FEATURE_FULL ? "full" : "limited"); 2162 } 2163 2164 /* Only for STM32H7 and after */ 2165 spi->t_size_max = spi->feature_set == STM32_SPI_FEATURE_FULL ? 2166 STM32H7_SPI_TSIZE_MAX : 2167 STM32MP25_SPI_TSIZE_MAX_LIMITED; 2168 dev_dbg(spi->dev, "one message max size %d\n", spi->t_size_max); 2169 2170 ret = spi->cfg->config(spi); 2171 if (ret) { 2172 dev_err(&pdev->dev, "controller configuration failed: %d\n", 2173 ret); 2174 goto err_clk_disable; 2175 } 2176 2177 ctrl->dev.of_node = pdev->dev.of_node; 2178 ctrl->auto_runtime_pm = true; 2179 ctrl->bus_num = pdev->id; 2180 ctrl->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST | 2181 SPI_3WIRE; 2182 ctrl->bits_per_word_mask = spi->cfg->get_bpw_mask(spi); 2183 ctrl->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min; 2184 ctrl->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max; 2185 ctrl->use_gpio_descriptors = true; 2186 ctrl->optimize_message = stm32_spi_optimize_message; 2187 ctrl->prepare_message = stm32_spi_prepare_msg; 2188 ctrl->transfer_one = stm32_spi_transfer_one; 2189 ctrl->unprepare_message = stm32_spi_unprepare_msg; 2190 ctrl->flags = spi->cfg->flags; 2191 if (STM32_SPI_DEVICE_MODE(spi)) 2192 ctrl->target_abort = stm32h7_spi_device_abort; 2193 2194 spi->dma_tx = dma_request_chan(spi->dev, "tx"); 2195 if (IS_ERR(spi->dma_tx)) { 2196 ret = PTR_ERR(spi->dma_tx); 2197 spi->dma_tx = NULL; 2198 if (ret == -EPROBE_DEFER) 2199 goto err_clk_disable; 2200 2201 dev_warn(&pdev->dev, "failed to request tx dma channel\n"); 2202 } else { 2203 ctrl->dma_tx = spi->dma_tx; 2204 } 2205 2206 spi->dma_rx = dma_request_chan(spi->dev, "rx"); 2207 if (IS_ERR(spi->dma_rx)) { 2208 ret = PTR_ERR(spi->dma_rx); 2209 spi->dma_rx = NULL; 2210 if (ret == -EPROBE_DEFER) 2211 goto err_dma_release; 2212 2213 dev_warn(&pdev->dev, "failed to request rx dma channel\n"); 2214 } else { 2215 ctrl->dma_rx = spi->dma_rx; 2216 } 2217 2218 if (spi->dma_tx || spi->dma_rx) 2219 ctrl->can_dma = stm32_spi_can_dma; 2220 2221 pm_runtime_set_autosuspend_delay(&pdev->dev, 2222 STM32_SPI_AUTOSUSPEND_DELAY); 2223 pm_runtime_use_autosuspend(&pdev->dev); 2224 pm_runtime_set_active(&pdev->dev); 2225 pm_runtime_get_noresume(&pdev->dev); 2226 pm_runtime_enable(&pdev->dev); 2227 2228 ret = spi_register_controller(ctrl); 2229 if (ret) { 2230 dev_err(&pdev->dev, "spi controller registration failed: %d\n", 2231 ret); 2232 goto err_pm_disable; 2233 } 2234 2235 pm_runtime_mark_last_busy(&pdev->dev); 2236 pm_runtime_put_autosuspend(&pdev->dev); 2237 2238 dev_info(&pdev->dev, "driver initialized (%s mode)\n", 2239 STM32_SPI_HOST_MODE(spi) ? "host" : "device"); 2240 2241 return 0; 2242 2243 err_pm_disable: 2244 pm_runtime_disable(&pdev->dev); 2245 pm_runtime_put_noidle(&pdev->dev); 2246 pm_runtime_set_suspended(&pdev->dev); 2247 pm_runtime_dont_use_autosuspend(&pdev->dev); 2248 err_dma_release: 2249 if (spi->dma_tx) 2250 dma_release_channel(spi->dma_tx); 2251 if (spi->dma_rx) 2252 dma_release_channel(spi->dma_rx); 2253 err_clk_disable: 2254 clk_disable_unprepare(spi->clk); 2255 2256 return ret; 2257 } 2258 2259 static void stm32_spi_remove(struct platform_device *pdev) 2260 { 2261 struct spi_controller *ctrl = platform_get_drvdata(pdev); 2262 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 2263 2264 pm_runtime_get_sync(&pdev->dev); 2265 2266 spi_unregister_controller(ctrl); 2267 spi->cfg->disable(spi); 2268 2269 pm_runtime_disable(&pdev->dev); 2270 pm_runtime_put_noidle(&pdev->dev); 2271 pm_runtime_set_suspended(&pdev->dev); 2272 pm_runtime_dont_use_autosuspend(&pdev->dev); 2273 2274 if (ctrl->dma_tx) 2275 dma_release_channel(ctrl->dma_tx); 2276 if (ctrl->dma_rx) 2277 dma_release_channel(ctrl->dma_rx); 2278 2279 clk_disable_unprepare(spi->clk); 2280 2281 2282 pinctrl_pm_select_sleep_state(&pdev->dev); 2283 } 2284 2285 static int __maybe_unused stm32_spi_runtime_suspend(struct device *dev) 2286 { 2287 struct spi_controller *ctrl = dev_get_drvdata(dev); 2288 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 2289 2290 clk_disable_unprepare(spi->clk); 2291 2292 return pinctrl_pm_select_sleep_state(dev); 2293 } 2294 2295 static int __maybe_unused stm32_spi_runtime_resume(struct device *dev) 2296 { 2297 struct spi_controller *ctrl = dev_get_drvdata(dev); 2298 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 2299 int ret; 2300 2301 ret = pinctrl_pm_select_default_state(dev); 2302 if (ret) 2303 return ret; 2304 2305 return clk_prepare_enable(spi->clk); 2306 } 2307 2308 static int __maybe_unused stm32_spi_suspend(struct device *dev) 2309 { 2310 struct spi_controller *ctrl = dev_get_drvdata(dev); 2311 int ret; 2312 2313 ret = spi_controller_suspend(ctrl); 2314 if (ret) 2315 return ret; 2316 2317 return pm_runtime_force_suspend(dev); 2318 } 2319 2320 static int __maybe_unused stm32_spi_resume(struct device *dev) 2321 { 2322 struct spi_controller *ctrl = dev_get_drvdata(dev); 2323 struct stm32_spi *spi = spi_controller_get_devdata(ctrl); 2324 int ret; 2325 2326 ret = pm_runtime_force_resume(dev); 2327 if (ret) 2328 return ret; 2329 2330 ret = spi_controller_resume(ctrl); 2331 if (ret) { 2332 clk_disable_unprepare(spi->clk); 2333 return ret; 2334 } 2335 2336 ret = pm_runtime_resume_and_get(dev); 2337 if (ret < 0) { 2338 dev_err(dev, "Unable to power device:%d\n", ret); 2339 return ret; 2340 } 2341 2342 spi->cfg->config(spi); 2343 2344 pm_runtime_mark_last_busy(dev); 2345 pm_runtime_put_autosuspend(dev); 2346 2347 return 0; 2348 } 2349 2350 static const struct dev_pm_ops stm32_spi_pm_ops = { 2351 SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume) 2352 SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend, 2353 stm32_spi_runtime_resume, NULL) 2354 }; 2355 2356 static struct platform_driver stm32_spi_driver = { 2357 .probe = stm32_spi_probe, 2358 .remove_new = stm32_spi_remove, 2359 .driver = { 2360 .name = DRIVER_NAME, 2361 .pm = &stm32_spi_pm_ops, 2362 .of_match_table = stm32_spi_of_match, 2363 }, 2364 }; 2365 2366 module_platform_driver(stm32_spi_driver); 2367 2368 MODULE_ALIAS("platform:" DRIVER_NAME); 2369 MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver"); 2370 MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>"); 2371 MODULE_LICENSE("GPL v2"); 2372