// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
//
// AMD SPI controller driver
//
// Copyright (c) 2020, Advanced Micro Devices, Inc.
//
// Author: Sanjay R Mehta <sanju.mehta@amd.com>

#include <linux/acpi.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi-mem.h>

#define AMD_SPI_CTRL0_REG	0x00
#define AMD_SPI_EXEC_CMD	BIT(16)
#define AMD_SPI_FIFO_CLEAR	BIT(20)
#define AMD_SPI_BUSY		BIT(31)

#define AMD_SPI_OPCODE_REG	0x45
#define AMD_SPI_CMD_TRIGGER_REG	0x47
#define AMD_SPI_TRIGGER_CMD	BIT(7)

#define AMD_SPI_OPCODE_MASK	0xFF

#define AMD_SPI_ALT_CS_REG	0x1D
#define AMD_SPI_ALT_CS_MASK	0x3

#define AMD_SPI_FIFO_BASE	0x80
#define AMD_SPI_TX_COUNT_REG	0x48
#define AMD_SPI_RX_COUNT_REG	0x4B
#define AMD_SPI_STATUS_REG	0x4C
#define AMD_SPI_ADDR32CTRL_REG	0x50

#define AMD_SPI_FIFO_SIZE	70
#define AMD_SPI_MEM_SIZE	200
#define AMD_SPI_MAX_DATA	64
#define AMD_SPI_HID2_DMA_SIZE   4096

#define AMD_SPI_ENA_REG		0x20
#define AMD_SPI_ALT_SPD_SHIFT	20
#define AMD_SPI_ALT_SPD_MASK	GENMASK(23, AMD_SPI_ALT_SPD_SHIFT)
#define AMD_SPI_SPI100_SHIFT	0
#define AMD_SPI_SPI100_MASK	GENMASK(AMD_SPI_SPI100_SHIFT, AMD_SPI_SPI100_SHIFT)
#define AMD_SPI_SPEED_REG	0x6C
#define AMD_SPI_SPD7_SHIFT	8
#define AMD_SPI_SPD7_MASK	GENMASK(13, AMD_SPI_SPD7_SHIFT)

#define AMD_SPI_HID2_INPUT_RING_BUF0	0X100
#define AMD_SPI_HID2_CNTRL		0x150
#define AMD_SPI_HID2_INT_STATUS		0x154
#define AMD_SPI_HID2_CMD_START		0x156
#define AMD_SPI_HID2_INT_MASK		0x158
#define AMD_SPI_HID2_READ_CNTRL0	0x170
#define AMD_SPI_HID2_READ_CNTRL1	0x174
#define AMD_SPI_HID2_READ_CNTRL2	0x180

#define AMD_SPI_MAX_HZ		100000000
#define AMD_SPI_MIN_HZ		800000

#define AMD_SPI_IO_SLEEP_US	20
#define AMD_SPI_IO_TIMEOUT_US	2000000

/* SPI read command opcodes */
#define AMD_SPI_OP_READ          0x03	/* Read data bytes (low frequency) */
#define AMD_SPI_OP_READ_FAST     0x0b	/* Read data bytes (high frequency) */
#define AMD_SPI_OP_READ_1_1_2    0x3b	/* Read data bytes (Dual Output SPI) */
#define AMD_SPI_OP_READ_1_2_2    0xbb	/* Read data bytes (Dual I/O SPI) */
#define AMD_SPI_OP_READ_1_1_4    0x6b	/* Read data bytes (Quad Output SPI) */
#define AMD_SPI_OP_READ_1_4_4    0xeb	/* Read data bytes (Quad I/O SPI) */

/* SPI read command opcodes - 4B address */
#define AMD_SPI_OP_READ_FAST_4B		0x0c    /* Read data bytes (high frequency) */
#define AMD_SPI_OP_READ_1_1_2_4B	0x3c    /* Read data bytes (Dual Output SPI) */
#define AMD_SPI_OP_READ_1_2_2_4B	0xbc    /* Read data bytes (Dual I/O SPI) */
#define AMD_SPI_OP_READ_1_1_4_4B	0x6c    /* Read data bytes (Quad Output SPI) */
#define AMD_SPI_OP_READ_1_4_4_4B	0xec    /* Read data bytes (Quad I/O SPI) */

/**
 * enum amd_spi_versions - SPI controller versions
 * @AMD_SPI_V1:		AMDI0061 hardware version
 * @AMD_SPI_V2:		AMDI0062 hardware version
 * @AMD_HID2_SPI:	AMDI0063 hardware version
 */
enum amd_spi_versions {
	AMD_SPI_V1 = 1,
	AMD_SPI_V2,
	AMD_HID2_SPI,
};

enum amd_spi_speed {
	F_66_66MHz,
	F_33_33MHz,
	F_22_22MHz,
	F_16_66MHz,
	F_100MHz,
	F_800KHz,
	SPI_SPD7 = 0x7,
	F_50MHz = 0x4,
	F_4MHz = 0x32,
	F_3_17MHz = 0x3F
};

/**
 * struct amd_spi_freq - Matches device speed with values to write in regs
 * @speed_hz: Device frequency
 * @enable_val: Value to be written to "enable register"
 * @spd7_val: Some frequencies require a value to be written to the SPISPEED register
 */
struct amd_spi_freq {
	u32 speed_hz;
	u32 enable_val;
	u32 spd7_val;
};

/**
 * struct amd_spi - SPI driver instance
 * @io_remap_addr:	Start address of the SPI controller registers
 * @phy_dma_buf:	Physical address of DMA buffer
 * @dma_virt_addr:	Virtual address of DMA buffer
 * @version:		SPI controller hardware version
 * @speed_hz:		Device frequency
 */
struct amd_spi {
	void __iomem *io_remap_addr;
	dma_addr_t phy_dma_buf;
	void *dma_virt_addr;
	enum amd_spi_versions version;
	unsigned int speed_hz;
};

static inline u8 amd_spi_readreg8(struct amd_spi *amd_spi, int idx)
{
	return readb((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg8(struct amd_spi *amd_spi, int idx, u8 val)
{
	writeb(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static void amd_spi_setclear_reg8(struct amd_spi *amd_spi, int idx, u8 set, u8 clear)
{
	u8 tmp = amd_spi_readreg8(amd_spi, idx);

	tmp = (tmp & ~clear) | set;
	amd_spi_writereg8(amd_spi, idx, tmp);
}

static inline u16 amd_spi_readreg16(struct amd_spi *amd_spi, int idx)
{
	return readw((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg16(struct amd_spi *amd_spi, int idx, u16 val)
{
	writew(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline u32 amd_spi_readreg32(struct amd_spi *amd_spi, int idx)
{
	return readl((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg32(struct amd_spi *amd_spi, int idx, u32 val)
{
	writel(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline u64 amd_spi_readreg64(struct amd_spi *amd_spi, int idx)
{
	return readq((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg64(struct amd_spi *amd_spi, int idx, u64 val)
{
	writeq(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline void amd_spi_setclear_reg32(struct amd_spi *amd_spi, int idx, u32 set, u32 clear)
{
	u32 tmp = amd_spi_readreg32(amd_spi, idx);

	tmp = (tmp & ~clear) | set;
	amd_spi_writereg32(amd_spi, idx, tmp);
}
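
/*
 * Illustrative note (not part of the driver): the setclear helpers above do a
 * read-modify-write, clearing the @clear bits before OR-ing in @set. Passing
 * the same single-bit mask for both arguments is how the driver pulses
 * trigger bits, e.g.
 *
 *	amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG,
 *			       AMD_SPI_EXEC_CMD, AMD_SPI_EXEC_CMD);
 *
 * writes back CTRL0 with bit 16 (ExecuteOpCode) set and all other bits
 * preserved.
 */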

static void amd_spi_select_chip(struct amd_spi *amd_spi, u8 cs)
{
	amd_spi_setclear_reg8(amd_spi, AMD_SPI_ALT_CS_REG, cs, AMD_SPI_ALT_CS_MASK);
}

static inline void amd_spi_clear_chip(struct amd_spi *amd_spi, u8 chip_select)
{
	amd_spi_writereg8(amd_spi, AMD_SPI_ALT_CS_REG, chip_select & ~AMD_SPI_ALT_CS_MASK);
}

static void amd_spi_clear_fifo_ptr(struct amd_spi *amd_spi)
{
	amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_FIFO_CLEAR, AMD_SPI_FIFO_CLEAR);
}

static int amd_spi_set_opcode(struct amd_spi *amd_spi, u8 cmd_opcode)
{
	switch (amd_spi->version) {
	case AMD_SPI_V1:
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, cmd_opcode,
				       AMD_SPI_OPCODE_MASK);
		return 0;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		amd_spi_writereg8(amd_spi, AMD_SPI_OPCODE_REG, cmd_opcode);
		return 0;
	default:
		return -ENODEV;
	}
}

static inline void amd_spi_set_rx_count(struct amd_spi *amd_spi, u8 rx_count)
{
	amd_spi_writereg8(amd_spi, AMD_SPI_RX_COUNT_REG, rx_count);
}

static inline void amd_spi_set_tx_count(struct amd_spi *amd_spi, u8 tx_count)
{
	amd_spi_writereg8(amd_spi, AMD_SPI_TX_COUNT_REG, tx_count);
}

static int amd_spi_busy_wait(struct amd_spi *amd_spi)
{
	u32 val;
	int reg;

	switch (amd_spi->version) {
	case AMD_SPI_V1:
		reg = AMD_SPI_CTRL0_REG;
		break;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		reg = AMD_SPI_STATUS_REG;
		break;
	default:
		return -ENODEV;
	}

	return readl_poll_timeout(amd_spi->io_remap_addr + reg, val,
				  !(val & AMD_SPI_BUSY), 20, 2000000);
}

static int amd_spi_execute_opcode(struct amd_spi *amd_spi)
{
	int ret;

	ret = amd_spi_busy_wait(amd_spi);
	if (ret)
		return ret;

	switch (amd_spi->version) {
	case AMD_SPI_V1:
		/* Set ExecuteOpCode bit in the CTRL0 register */
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_EXEC_CMD,
				       AMD_SPI_EXEC_CMD);
		return 0;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		/* Trigger the command execution */
		amd_spi_setclear_reg8(amd_spi, AMD_SPI_CMD_TRIGGER_REG,
				      AMD_SPI_TRIGGER_CMD, AMD_SPI_TRIGGER_CMD);
		return 0;
	default:
		return -ENODEV;
	}
}

static int amd_spi_host_setup(struct spi_device *spi)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(spi->controller);

	amd_spi_clear_fifo_ptr(amd_spi);

	return 0;
}

static const struct amd_spi_freq amd_spi_freq[] = {
	{ AMD_SPI_MAX_HZ,   F_100MHz,         0},
	{       66660000, F_66_66MHz,         0},
	{       50000000,   SPI_SPD7,   F_50MHz},
	{       33330000, F_33_33MHz,         0},
	{       22220000, F_22_22MHz,         0},
	{       16660000, F_16_66MHz,         0},
	{        4000000,   SPI_SPD7,    F_4MHz},
	{        3170000,   SPI_SPD7, F_3_17MHz},
	{ AMD_SPI_MIN_HZ,   F_800KHz,         0},
};

static int amd_set_spi_freq(struct amd_spi *amd_spi, u32 speed_hz)
{
	unsigned int i, spd7_val, alt_spd;

	if (speed_hz < AMD_SPI_MIN_HZ)
		return -EINVAL;

	for (i = 0; i < ARRAY_SIZE(amd_spi_freq); i++)
		if (speed_hz >= amd_spi_freq[i].speed_hz)
			break;

	if (amd_spi->speed_hz == amd_spi_freq[i].speed_hz)
		return 0;

	amd_spi->speed_hz = amd_spi_freq[i].speed_hz;

	alt_spd = (amd_spi_freq[i].enable_val << AMD_SPI_ALT_SPD_SHIFT)
		   & AMD_SPI_ALT_SPD_MASK;
	amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, alt_spd,
			       AMD_SPI_ALT_SPD_MASK);

	if (amd_spi->speed_hz == AMD_SPI_MAX_HZ)
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, 1,
				       AMD_SPI_SPI100_MASK);

	if (amd_spi_freq[i].spd7_val) {
		spd7_val = (amd_spi_freq[i].spd7_val << AMD_SPI_SPD7_SHIFT)
			    & AMD_SPI_SPD7_MASK;
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_SPEED_REG, spd7_val,
				       AMD_SPI_SPD7_MASK);
	}

	return 0;
}
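
/*
 * Illustrative sketch (not part of the driver): amd_set_spi_freq() walks
 * amd_spi_freq[] from fastest to slowest and picks the first entry that does
 * not exceed the requested rate. A hypothetical request of 40000000 Hz falls
 * through the 100 MHz, 66.66 MHz and 50 MHz entries and matches
 * { 33330000, F_33_33MHz, 0 }:
 *
 *	amd_set_spi_freq(amd_spi, 40000000);
 *	- writes F_33_33MHz into AMD_SPI_ENA_REG bits [23:20] (AMD_SPI_ALT_SPD_MASK)
 *	- skips AMD_SPI_SPEED_REG because spd7_val is 0
 *
 * Entries such as 50 MHz or 4 MHz instead select SPI_SPD7 in the enable
 * register and program the actual speed code into AMD_SPI_SPEED_REG
 * bits [13:8] (AMD_SPI_SPD7_MASK).
 */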

static inline int amd_spi_fifo_xfer(struct amd_spi *amd_spi,
				    struct spi_controller *host,
				    struct spi_message *message)
{
	struct spi_transfer *xfer = NULL;
	struct spi_device *spi = message->spi;
	u8 cmd_opcode = 0, fifo_pos = AMD_SPI_FIFO_BASE;
	u8 *buf = NULL;
	u32 i = 0;
	u32 tx_len = 0, rx_len = 0;

	list_for_each_entry(xfer, &message->transfers,
			    transfer_list) {
		if (xfer->speed_hz)
			amd_set_spi_freq(amd_spi, xfer->speed_hz);
		else
			amd_set_spi_freq(amd_spi, spi->max_speed_hz);

		if (xfer->tx_buf) {
			buf = (u8 *)xfer->tx_buf;
			if (!tx_len) {
				cmd_opcode = *(u8 *)xfer->tx_buf;
				buf++;
				xfer->len--;
			}
			tx_len += xfer->len;

			/* Write data into the FIFO. */
			for (i = 0; i < xfer->len; i++)
				amd_spi_writereg8(amd_spi, fifo_pos + i, buf[i]);

			fifo_pos += xfer->len;
		}

		/* Store no. of bytes to be received from FIFO */
		if (xfer->rx_buf)
			rx_len += xfer->len;
	}

	if (!buf) {
		message->status = -EINVAL;
		goto fin_msg;
	}

	amd_spi_set_opcode(amd_spi, cmd_opcode);
	amd_spi_set_tx_count(amd_spi, tx_len);
	amd_spi_set_rx_count(amd_spi, rx_len);

	/* Execute command */
	message->status = amd_spi_execute_opcode(amd_spi);
	if (message->status)
		goto fin_msg;

	if (rx_len) {
		message->status = amd_spi_busy_wait(amd_spi);
		if (message->status)
			goto fin_msg;

		list_for_each_entry(xfer, &message->transfers, transfer_list)
			if (xfer->rx_buf) {
				buf = (u8 *)xfer->rx_buf;
				/* Read data from FIFO to receive buffer */
				for (i = 0; i < xfer->len; i++)
					buf[i] = amd_spi_readreg8(amd_spi, fifo_pos + i);
				fifo_pos += xfer->len;
			}
	}

	/* Update statistics */
	message->actual_length = tx_len + rx_len + 1;

fin_msg:
	switch (amd_spi->version) {
	case AMD_SPI_V1:
		break;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		amd_spi_clear_chip(amd_spi, spi_get_chipselect(message->spi, 0));
		break;
	default:
		return -ENODEV;
	}

	spi_finalize_current_message(host);

	return message->status;
}
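
/*
 * Illustrative sketch (not part of the driver), assuming a hypothetical
 * half-duplex peripheral driver: amd_spi_fifo_xfer() treats the first byte of
 * the first tx_buf as the command opcode and loads any remaining bytes into
 * the FIFO, so a message such as the one below ends up with cmd_opcode = 0x9f,
 * tx_len = 0 and rx_len = 3 (a JEDEC ID style read):
 *
 *	u8 cmd = 0x9f;
 *	u8 id[3];
 *	struct spi_transfer xfers[] = {
 *		{ .tx_buf = &cmd, .len = 1 },
 *		{ .rx_buf = id,   .len = 3 },
 *	};
 *	struct spi_message msg;
 *
 *	spi_message_init_with_transfers(&msg, xfers, ARRAY_SIZE(xfers));
 *	spi_sync(spi, &msg);
 */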

static inline bool amd_is_spi_read_cmd_4b(const u16 op)
{
	switch (op) {
	case AMD_SPI_OP_READ_FAST_4B:
	case AMD_SPI_OP_READ_1_1_2_4B:
	case AMD_SPI_OP_READ_1_2_2_4B:
	case AMD_SPI_OP_READ_1_1_4_4B:
	case AMD_SPI_OP_READ_1_4_4_4B:
		return true;
	default:
		return false;
	}
}

static inline bool amd_is_spi_read_cmd(const u16 op)
{
	switch (op) {
	case AMD_SPI_OP_READ:
	case AMD_SPI_OP_READ_FAST:
	case AMD_SPI_OP_READ_1_1_2:
	case AMD_SPI_OP_READ_1_2_2:
	case AMD_SPI_OP_READ_1_1_4:
	case AMD_SPI_OP_READ_1_4_4:
		return true;
	default:
		return amd_is_spi_read_cmd_4b(op);
	}
}

static bool amd_spi_supports_op(struct spi_mem *mem,
				const struct spi_mem_op *op)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);

	/* bus width is number of IO lines used to transmit */
	if (op->cmd.buswidth > 1 || op->addr.buswidth > 4)
		return false;

	/* AMD SPI controllers support quad mode only for read operations */
	if (amd_is_spi_read_cmd(op->cmd.opcode)) {
		if (op->data.buswidth > 4)
			return false;

		/*
		 * HID2 SPI controller supports DMA read up to 4K bytes and
		 * doesn't support 4-byte address commands.
		 */
		if (amd_spi->version == AMD_HID2_SPI) {
			if (amd_is_spi_read_cmd_4b(op->cmd.opcode) ||
			    op->data.nbytes > AMD_SPI_HID2_DMA_SIZE)
				return false;
		} else if (op->data.nbytes > AMD_SPI_MAX_DATA) {
			return false;
		}
	} else if (op->data.buswidth > 1 || op->data.nbytes > AMD_SPI_MAX_DATA) {
		return false;
	}

	return spi_mem_default_supports_op(mem, op);
}

static int amd_spi_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);

	/*
	 * HID2 SPI controller DMA read mode supports reading up to 4K bytes
	 * in a single transaction, whereas SPI0 and the HID2 SPI controller
	 * index mode support a maximum of 64 bytes per transaction.
	 */
	if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode))
		op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_HID2_DMA_SIZE);
	else
		op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_MAX_DATA);

	return 0;
}
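
/*
 * Illustrative sketch (not part of the driver): the spi-mem core consults
 * supports_op()/adjust_op_size() around exec_op(). A flash layer issuing a
 * Quad Output fast read might describe the operation as:
 *
 *	struct spi_mem_op op = SPI_MEM_OP(SPI_MEM_OP_CMD(0x6b, 1),
 *					  SPI_MEM_OP_ADDR(3, 0x1000, 1),
 *					  SPI_MEM_OP_DUMMY(1, 1),
 *					  SPI_MEM_OP_DATA_IN(256, buf, 4));
 *
 * amd_spi_adjust_op_size() clamps op.data.nbytes to AMD_SPI_MAX_DATA (64) for
 * index-mode transfers and to AMD_SPI_HID2_DMA_SIZE (4096) for HID2 DMA
 * reads; the caller then splits the access into that many chunks.
 */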

static void amd_spi_set_addr(struct amd_spi *amd_spi,
			     const struct spi_mem_op *op)
{
	u8 nbytes = op->addr.nbytes;
	u64 addr_val = op->addr.val;
	int base_addr, i;

	base_addr = AMD_SPI_FIFO_BASE + nbytes;

	for (i = 0; i < nbytes; i++) {
		amd_spi_writereg8(amd_spi, base_addr - i - 1, addr_val &
				  GENMASK(7, 0));
		addr_val >>= 8;
	}
}
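
/*
 * Illustrative sketch (not part of the driver): amd_spi_set_addr() stores the
 * address most-significant byte first, starting at AMD_SPI_FIFO_BASE. For a
 * 3-byte address of 0x012345, base_addr is AMD_SPI_FIFO_BASE + 3 and the loop
 * produces:
 *
 *	FIFO[0x82] = 0x45   (written first, i = 0)
 *	FIFO[0x81] = 0x23   (written next,  i = 1)
 *	FIFO[0x80] = 0x01   (written last,  i = 2)
 */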

static void amd_spi_mem_data_out(struct amd_spi *amd_spi,
				 const struct spi_mem_op *op)
{
	int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
	u64 *buf_64 = (u64 *)op->data.buf.out;
	u32 nbytes = op->data.nbytes;
	u32 left_data = nbytes;
	u8 *buf;
	int i;

	amd_spi_set_opcode(amd_spi, op->cmd.opcode);
	amd_spi_set_addr(amd_spi, op);

	for (i = 0; left_data >= 8; i++, left_data -= 8)
		amd_spi_writereg64(amd_spi, base_addr + op->dummy.nbytes + (i * 8), *buf_64++);

	buf = (u8 *)buf_64;
	for (i = 0; i < left_data; i++) {
		amd_spi_writereg8(amd_spi, base_addr + op->dummy.nbytes + nbytes + i - left_data,
				  buf[i]);
	}

	amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->data.nbytes);
	amd_spi_set_rx_count(amd_spi, 0);
	amd_spi_clear_fifo_ptr(amd_spi);
	amd_spi_execute_opcode(amd_spi);
}

static void amd_spi_hiddma_read(struct amd_spi *amd_spi, const struct spi_mem_op *op)
{
	u16 hid_cmd_start, val;
	u32 hid_regval;

	/* Set the opcode in the hid2_read_control0 register */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0);
	hid_regval = (hid_regval & ~GENMASK(7, 0)) | op->cmd.opcode;

	/*
	 * Program the address in the hid2_read_control0 register bits [31:8]. The address
	 * should be written starting from the 8th bit of the register, requiring an 8-bit
	 * shift. Additionally, to convert a 2-byte spinand address to a 3-byte address,
	 * another 8-bit shift is needed. Therefore, a total shift of 16 bits is required.
	 */
	hid_regval = (hid_regval & ~GENMASK(31, 8)) | (op->addr.val << 16);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0, hid_regval);

	/* Configure dummy clock cycles for fast read, dual and quad I/O commands */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2);
	/* Fast read dummy cycle */
	hid_regval &= ~GENMASK(4, 0);

	/* Fast read Dual I/O dummy cycle */
	hid_regval &= ~GENMASK(12, 8);

	/* Fast read Quad I/O dummy cycle */
	hid_regval = (hid_regval & ~GENMASK(20, 16)) | BIT(17);

	/* Set the preamble byte count */
	hid_regval &= ~GENMASK(27, 24);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2, hid_regval);

	/*
	 * Program the HID2 Input Ring Buffer0: 4K-aligned buf_memory_addr[31:12],
	 * buf_size[4:0], end_input_ring[5].
	 */
	hid_regval = amd_spi->phy_dma_buf | BIT(5) | BIT(0);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INPUT_RING_BUF0, hid_regval);

	/* Program the max read length (number of DWORDs) in the hid2_read_control1 register */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1);
	hid_regval = (hid_regval & ~GENMASK(15, 0)) | ((op->data.nbytes / 4) - 1);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1, hid_regval);

	/* Set the cmd start bit in the hid2_cmd_start register to trigger the HID basic read */
	hid_cmd_start = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_CMD_START);
	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_CMD_START, (hid_cmd_start | BIT(3)));

	/* Poll the HIDDMA basic read completion status in the hid2_int_status register */
	readw_poll_timeout(amd_spi->io_remap_addr + AMD_SPI_HID2_INT_STATUS, val,
			   (val & BIT(3)), AMD_SPI_IO_SLEEP_US, AMD_SPI_IO_TIMEOUT_US);

	/* Clear the interrupts by writing to the hid2_int_status register */
	val = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_INT_STATUS);
	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_INT_STATUS, val);
}
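
/*
 * Illustrative sketch (not part of the driver): for a hypothetical 2048-byte
 * HIDDMA read, amd_spi_hiddma_read() ends up programming roughly:
 *
 *	HID2_READ_CNTRL0[7:0]  = op->cmd.opcode
 *	HID2_READ_CNTRL0[31:8] = op->addr.val << 16   (2-byte address made 3 bytes)
 *	HID2_READ_CNTRL1[15:0] = (2048 / 4) - 1 = 511 (read length in DWORDs)
 *	HID2_CMD_START        |= BIT(3)               (kick off the basic read)
 *
 * and then polls BIT(3) of HID2_INT_STATUS until the DMA into the ring buffer
 * at phy_dma_buf completes or the 2 s timeout expires.
 */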

static void amd_spi_mem_data_in(struct amd_spi *amd_spi,
				const struct spi_mem_op *op)
{
	int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
	u64 *buf_64 = (u64 *)op->data.buf.in;
	u32 nbytes = op->data.nbytes;
	u32 left_data = nbytes;
	u32 data;
	u8 *buf;
	int i;

	/*
	 * Use HID read mode only when reading complete page data;
	 * use index mode otherwise.
	 */
	if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode)) {
		amd_spi_hiddma_read(amd_spi, op);

		for (i = 0; left_data >= 8; i++, left_data -= 8)
			*buf_64++ = readq((u8 __iomem *)amd_spi->dma_virt_addr + (i * 8));

		buf = (u8 *)buf_64;
		for (i = 0; i < left_data; i++)
			buf[i] = readb((u8 __iomem *)amd_spi->dma_virt_addr +
				       (nbytes - left_data + i));

		/* Reset HID RX memory logic */
		data = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
		amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, data | BIT(5));
	} else {
		/* Index mode */
		amd_spi_set_opcode(amd_spi, op->cmd.opcode);
		amd_spi_set_addr(amd_spi, op);
		amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->dummy.nbytes);

		for (i = 0; i < op->dummy.nbytes; i++)
			amd_spi_writereg8(amd_spi, (base_addr + i), 0xff);

		amd_spi_set_rx_count(amd_spi, op->data.nbytes);
		amd_spi_clear_fifo_ptr(amd_spi);
		amd_spi_execute_opcode(amd_spi);
		amd_spi_busy_wait(amd_spi);

		for (i = 0; left_data >= 8; i++, left_data -= 8)
			*buf_64++ = amd_spi_readreg64(amd_spi, base_addr + op->dummy.nbytes +
						      (i * 8));

		buf = (u8 *)buf_64;
		for (i = 0; i < left_data; i++)
			buf[i] = amd_spi_readreg8(amd_spi, base_addr + op->dummy.nbytes +
						  nbytes + i - left_data);
	}
}

static void amd_set_spi_addr_mode(struct amd_spi *amd_spi,
				  const struct spi_mem_op *op)
{
	u32 val = amd_spi_readreg32(amd_spi, AMD_SPI_ADDR32CTRL_REG);

	if (amd_is_spi_read_cmd_4b(op->cmd.opcode))
		amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val | BIT(0));
	else
		amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val & ~BIT(0));
}

static int amd_spi_exec_mem_op(struct spi_mem *mem,
			       const struct spi_mem_op *op)
{
	struct amd_spi *amd_spi;
	int ret;

	amd_spi = spi_controller_get_devdata(mem->spi->controller);

	ret = amd_set_spi_freq(amd_spi, mem->spi->max_speed_hz);
	if (ret)
		return ret;

	if (amd_spi->version == AMD_SPI_V2)
		amd_set_spi_addr_mode(amd_spi, op);

	switch (op->data.dir) {
	case SPI_MEM_DATA_IN:
		amd_spi_mem_data_in(amd_spi, op);
		break;
	case SPI_MEM_DATA_OUT:
		fallthrough;
	case SPI_MEM_NO_DATA:
		amd_spi_mem_data_out(amd_spi, op);
		break;
	default:
		ret = -EOPNOTSUPP;
	}

	return ret;
}

static const struct spi_controller_mem_ops amd_spi_mem_ops = {
	.exec_op = amd_spi_exec_mem_op,
	.adjust_op_size = amd_spi_adjust_op_size,
	.supports_op = amd_spi_supports_op,
};

static int amd_spi_host_transfer(struct spi_controller *host,
				   struct spi_message *msg)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(host);
	struct spi_device *spi = msg->spi;

	amd_spi_select_chip(amd_spi, spi_get_chipselect(spi, 0));

	/*
	 * Extract spi_transfers from the spi message and
	 * program the controller.
	 */
	return amd_spi_fifo_xfer(amd_spi, host, msg);
}

static size_t amd_spi_max_transfer_size(struct spi_device *spi)
{
	return AMD_SPI_FIFO_SIZE;
}

static int amd_spi_setup_hiddma(struct amd_spi *amd_spi, struct device *dev)
{
	u32 hid_regval;

	/* Allocate a DMA buffer to use for the HID basic read operation */
	amd_spi->dma_virt_addr = dma_alloc_coherent(dev, AMD_SPI_HID2_DMA_SIZE,
						    &amd_spi->phy_dma_buf, GFP_KERNEL);
	if (!amd_spi->dma_virt_addr)
		return -ENOMEM;

	/*
	 * Enable interrupts and set the mask bits in the hid2_int_mask register so that
	 * HIDDMA basic read operations generate interrupts properly.
	 */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_INT_MASK);
	hid_regval = (hid_regval & GENMASK(31, 8)) | BIT(19);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INT_MASK, hid_regval);

	/* Configure the buffer unit (4K) in the hid2_control register */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, hid_regval & ~BIT(3));

	return 0;
}

static int amd_spi_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct spi_controller *host;
	struct amd_spi *amd_spi;
	int err;

	/* Allocate storage for host and driver private data */
	host = devm_spi_alloc_host(dev, sizeof(struct amd_spi));
	if (!host)
		return dev_err_probe(dev, -ENOMEM, "Error allocating SPI host\n");

	amd_spi = spi_controller_get_devdata(host);
	amd_spi->io_remap_addr = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(amd_spi->io_remap_addr))
		return dev_err_probe(dev, PTR_ERR(amd_spi->io_remap_addr),
				     "ioremap of SPI registers failed\n");

	dev_dbg(dev, "io_remap_address: %p\n", amd_spi->io_remap_addr);

	amd_spi->version = (uintptr_t) device_get_match_data(dev);

	/* Initialize the spi_controller fields */
	host->bus_num = (amd_spi->version == AMD_HID2_SPI) ? 2 : 0;
	host->num_chipselect = 4;
	host->mode_bits = SPI_TX_DUAL | SPI_TX_QUAD | SPI_RX_DUAL | SPI_RX_QUAD;
	host->flags = SPI_CONTROLLER_HALF_DUPLEX;
	host->max_speed_hz = AMD_SPI_MAX_HZ;
	host->min_speed_hz = AMD_SPI_MIN_HZ;
	host->setup = amd_spi_host_setup;
	host->transfer_one_message = amd_spi_host_transfer;
	host->mem_ops = &amd_spi_mem_ops;
	host->max_transfer_size = amd_spi_max_transfer_size;
	host->max_message_size = amd_spi_max_transfer_size;

	/* Register the controller with SPI framework */
	err = devm_spi_register_controller(dev, host);
	if (err)
		return dev_err_probe(dev, err, "error registering SPI controller\n");

	if (amd_spi->version == AMD_HID2_SPI)
		err = amd_spi_setup_hiddma(amd_spi, dev);

	return err;
}

#ifdef CONFIG_ACPI
static const struct acpi_device_id spi_acpi_match[] = {
	{ "AMDI0061", AMD_SPI_V1 },
	{ "AMDI0062", AMD_SPI_V2 },
	{ "AMDI0063", AMD_HID2_SPI },
	{},
};
MODULE_DEVICE_TABLE(acpi, spi_acpi_match);
#endif

static struct platform_driver amd_spi_driver = {
	.driver = {
		.name = "amd_spi",
		.acpi_match_table = ACPI_PTR(spi_acpi_match),
	},
	.probe = amd_spi_probe,
};

module_platform_driver(amd_spi_driver);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Sanjay Mehta <sanju.mehta@amd.com>");
MODULE_DESCRIPTION("AMD SPI Master Controller Driver");