1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 //
3 // AMD SPI controller driver
4 //
5 // Copyright (c) 2020, Advanced Micro Devices, Inc.
6 //
7 // Author: Sanjay R Mehta <sanju.mehta@amd.com>
8 
9 #include <linux/acpi.h>
10 #include <linux/delay.h>
11 #include <linux/dma-mapping.h>
12 #include <linux/init.h>
13 #include <linux/io-64-nonatomic-lo-hi.h>
14 #include <linux/iopoll.h>
15 #include <linux/module.h>
16 #include <linux/platform_device.h>
17 #include <linux/spi/spi.h>
18 #include <linux/spi/spi-mem.h>
19 
20 #include "spi-amd.h"
21 
22 #define AMD_SPI_CTRL0_REG	0x00
23 #define AMD_SPI_EXEC_CMD	BIT(16)
24 #define AMD_SPI_FIFO_CLEAR	BIT(20)
25 #define AMD_SPI_BUSY		BIT(31)
26 
27 #define AMD_SPI_OPCODE_REG	0x45
28 #define AMD_SPI_CMD_TRIGGER_REG	0x47
29 #define AMD_SPI_TRIGGER_CMD	BIT(7)
30 
31 #define AMD_SPI_OPCODE_MASK	0xFF
32 
33 #define AMD_SPI_ALT_CS_REG	0x1D
34 #define AMD_SPI_ALT_CS_MASK	0x3
35 
36 #define AMD_SPI_FIFO_BASE	0x80
37 #define AMD_SPI_TX_COUNT_REG	0x48
38 #define AMD_SPI_RX_COUNT_REG	0x4B
39 #define AMD_SPI_STATUS_REG	0x4C
40 #define AMD_SPI_ADDR32CTRL_REG	0x50
41 
42 #define AMD_SPI_FIFO_SIZE	70
43 #define AMD_SPI_MEM_SIZE	200
44 #define AMD_SPI_MAX_DATA	64
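/*
 * The 70-byte FIFO presumably leaves headroom for address and dummy bytes
 * on top of the 64-byte data payload enforced for spi-mem operations.
 */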
45 #define AMD_SPI_HID2_DMA_SIZE   4096
46 
47 #define AMD_SPI_ENA_REG		0x20
48 #define AMD_SPI_ALT_SPD_SHIFT	20
49 #define AMD_SPI_ALT_SPD_MASK	GENMASK(23, AMD_SPI_ALT_SPD_SHIFT)
50 #define AMD_SPI_SPI100_SHIFT	0
51 #define AMD_SPI_SPI100_MASK	GENMASK(AMD_SPI_SPI100_SHIFT, AMD_SPI_SPI100_SHIFT)
52 #define AMD_SPI_SPEED_REG	0x6C
53 #define AMD_SPI_SPD7_SHIFT	8
54 #define AMD_SPI_SPD7_MASK	GENMASK(13, AMD_SPI_SPD7_SHIFT)
55 
56 #define AMD_SPI_HID2_INPUT_RING_BUF0	0x100
57 #define AMD_SPI_HID2_OUTPUT_BUF0	0x140
58 #define AMD_SPI_HID2_CNTRL		0x150
59 #define AMD_SPI_HID2_INT_STATUS		0x154
60 #define AMD_SPI_HID2_CMD_START		0x156
61 #define AMD_SPI_HID2_INT_MASK		0x158
62 #define AMD_SPI_HID2_WRITE_CNTRL0	0x160
63 #define AMD_SPI_HID2_WRITE_CNTRL1	0x164
64 #define AMD_SPI_HID2_READ_CNTRL0	0x170
65 #define AMD_SPI_HID2_READ_CNTRL1	0x174
66 #define AMD_SPI_HID2_READ_CNTRL2	0x180
67 
68 #define AMD_SPI_MAX_HZ		100000000
69 #define AMD_SPI_MIN_HZ		800000
70 
71 #define AMD_SPI_IO_SLEEP_US	20
72 #define AMD_SPI_IO_TIMEOUT_US	2000000
73 
74 /* SPI read command opcodes */
75 #define AMD_SPI_OP_READ          0x03	/* Read data bytes (low frequency) */
76 #define AMD_SPI_OP_READ_FAST     0x0b	/* Read data bytes (high frequency) */
77 #define AMD_SPI_OP_READ_1_1_2    0x3b	/* Read data bytes (Dual Output SPI) */
78 #define AMD_SPI_OP_READ_1_2_2    0xbb	/* Read data bytes (Dual I/O SPI) */
79 #define AMD_SPI_OP_READ_1_1_4    0x6b	/* Read data bytes (Quad Output SPI) */
80 #define AMD_SPI_OP_READ_1_4_4    0xeb	/* Read data bytes (Quad I/O SPI) */
81 
82 /* SPI read command opcodes - 4B address */
83 #define AMD_SPI_OP_READ_FAST_4B		0x0c    /* Read data bytes (high frequency) */
84 #define AMD_SPI_OP_READ_1_1_2_4B	0x3c    /* Read data bytes (Dual Output SPI) */
85 #define AMD_SPI_OP_READ_1_2_2_4B	0xbc    /* Read data bytes (Dual I/O SPI) */
86 #define AMD_SPI_OP_READ_1_1_4_4B	0x6c    /* Read data bytes (Quad Output SPI) */
87 #define AMD_SPI_OP_READ_1_4_4_4B	0xec    /* Read data bytes (Quad I/O SPI) */
88 
89 /* SPINAND write command opcodes */
90 #define AMD_SPI_OP_PP			0x02	/* Page program */
91 #define AMD_SPI_OP_PP_RANDOM		0x84	/* Page program */
92 
93 enum amd_spi_speed {
94 	F_66_66MHz,
95 	F_33_33MHz,
96 	F_22_22MHz,
97 	F_16_66MHz,
98 	F_100MHz,
99 	F_800KHz,
100 	SPI_SPD7 = 0x7,
101 	F_50MHz = 0x4,
102 	F_4MHz = 0x32,
103 	F_3_17MHz = 0x3F
104 };
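/*
 * The first six encodings are written to the ALT_SPD field of the enable
 * register; SPI_SPD7 instead routes the speed selection through the SPD7
 * field of the SPISPEED register, whose encodings include F_50MHz, F_4MHz
 * and F_3_17MHz (see amd_set_spi_freq()).
 */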
105 
106 /**
107  * struct amd_spi_freq - Matches device speed with values to write in regs
108  * @speed_hz: Device frequency
109  * @enable_val: Value to be written to "enable register"
110  * @spd7_val: Some frequencies require a value to also be written to the SPISPEED register
111  */
112 struct amd_spi_freq {
113 	u32 speed_hz;
114 	u32 enable_val;
115 	u32 spd7_val;
116 };
117 
118 static inline u8 amd_spi_readreg8(struct amd_spi *amd_spi, int idx)
119 {
120 	return readb((u8 __iomem *)amd_spi->io_remap_addr + idx);
121 }
122 
123 static inline void amd_spi_writereg8(struct amd_spi *amd_spi, int idx, u8 val)
124 {
125 	writeb(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
126 }
127 
128 static void amd_spi_setclear_reg8(struct amd_spi *amd_spi, int idx, u8 set, u8 clear)
129 {
130 	u8 tmp = amd_spi_readreg8(amd_spi, idx);
131 
132 	tmp = (tmp & ~clear) | set;
133 	amd_spi_writereg8(amd_spi, idx, tmp);
134 }
135 
136 static inline u16 amd_spi_readreg16(struct amd_spi *amd_spi, int idx)
137 {
138 	return readw((u8 __iomem *)amd_spi->io_remap_addr + idx);
139 }
140 
141 static inline void amd_spi_writereg16(struct amd_spi *amd_spi, int idx, u16 val)
142 {
143 	writew(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
144 }
145 
146 static inline u32 amd_spi_readreg32(struct amd_spi *amd_spi, int idx)
147 {
148 	return readl((u8 __iomem *)amd_spi->io_remap_addr + idx);
149 }
150 
151 static inline void amd_spi_writereg32(struct amd_spi *amd_spi, int idx, u32 val)
152 {
153 	writel(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
154 }
155 
156 static inline u64 amd_spi_readreg64(struct amd_spi *amd_spi, int idx)
157 {
158 	return readq((u8 __iomem *)amd_spi->io_remap_addr + idx);
159 }
160 
161 static inline void amd_spi_writereg64(struct amd_spi *amd_spi, int idx, u64 val)
162 {
163 	writeq(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
164 }
165 
166 static inline void amd_spi_setclear_reg32(struct amd_spi *amd_spi, int idx, u32 set, u32 clear)
167 {
168 	u32 tmp = amd_spi_readreg32(amd_spi, idx);
169 
170 	tmp = (tmp & ~clear) | set;
171 	amd_spi_writereg32(amd_spi, idx, tmp);
172 }
173 
174 static void amd_spi_select_chip(struct amd_spi *amd_spi, u8 cs)
175 {
176 	amd_spi_setclear_reg8(amd_spi, AMD_SPI_ALT_CS_REG, cs, AMD_SPI_ALT_CS_MASK);
177 }
178 
179 static inline void amd_spi_clear_chip(struct amd_spi *amd_spi, u8 chip_select)
180 {
181 	amd_spi_writereg8(amd_spi, AMD_SPI_ALT_CS_REG, chip_select & ~AMD_SPI_ALT_CS_MASK);
182 }
183 
184 static void amd_spi_clear_fifo_ptr(struct amd_spi *amd_spi)
185 {
186 	amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_FIFO_CLEAR, AMD_SPI_FIFO_CLEAR);
187 }
188 
189 static int amd_spi_set_opcode(struct amd_spi *amd_spi, u8 cmd_opcode)
190 {
191 	switch (amd_spi->version) {
192 	case AMD_SPI_V1:
193 		amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, cmd_opcode,
194 				       AMD_SPI_OPCODE_MASK);
195 		return 0;
196 	case AMD_SPI_V2:
197 	case AMD_HID2_SPI:
198 		amd_spi_writereg8(amd_spi, AMD_SPI_OPCODE_REG, cmd_opcode);
199 		return 0;
200 	default:
201 		return -ENODEV;
202 	}
203 }
204 
205 static inline void amd_spi_set_rx_count(struct amd_spi *amd_spi, u8 rx_count)
206 {
207 	amd_spi_writereg8(amd_spi, AMD_SPI_RX_COUNT_REG, rx_count);
208 }
209 
210 static inline void amd_spi_set_tx_count(struct amd_spi *amd_spi, u8 tx_count)
211 {
212 	amd_spi_writereg8(amd_spi, AMD_SPI_TX_COUNT_REG, tx_count);
213 }
214 
215 static int amd_spi_busy_wait(struct amd_spi *amd_spi)
216 {
217 	u32 val;
218 	int reg;
219 
220 	switch (amd_spi->version) {
221 	case AMD_SPI_V1:
222 		reg = AMD_SPI_CTRL0_REG;
223 		break;
224 	case AMD_SPI_V2:
225 	case AMD_HID2_SPI:
226 		reg = AMD_SPI_STATUS_REG;
227 		break;
228 	default:
229 		return -ENODEV;
230 	}
231 
232 	return readl_poll_timeout(amd_spi->io_remap_addr + reg, val,
233 				  !(val & AMD_SPI_BUSY), AMD_SPI_IO_SLEEP_US, AMD_SPI_IO_TIMEOUT_US);
234 }
235 
236 static int amd_spi_execute_opcode(struct amd_spi *amd_spi)
237 {
238 	int ret;
239 
240 	ret = amd_spi_busy_wait(amd_spi);
241 	if (ret)
242 		return ret;
243 
244 	switch (amd_spi->version) {
245 	case AMD_SPI_V1:
246 		/* Set ExecuteOpCode bit in the CTRL0 register */
247 		amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_EXEC_CMD,
248 				       AMD_SPI_EXEC_CMD);
249 		return 0;
250 	case AMD_SPI_V2:
251 	case AMD_HID2_SPI:
252 		/* Trigger the command execution */
253 		amd_spi_setclear_reg8(amd_spi, AMD_SPI_CMD_TRIGGER_REG,
254 				      AMD_SPI_TRIGGER_CMD, AMD_SPI_TRIGGER_CMD);
255 		return 0;
256 	default:
257 		return -ENODEV;
258 	}
259 }
260 
261 static int amd_spi_host_setup(struct spi_device *spi)
262 {
263 	struct amd_spi *amd_spi = spi_controller_get_devdata(spi->controller);
264 
265 	amd_spi_clear_fifo_ptr(amd_spi);
266 
267 	return 0;
268 }
269 
270 static const struct amd_spi_freq amd_spi_freq[] = {
271 	{ AMD_SPI_MAX_HZ,   F_100MHz,         0},
272 	{       66660000, F_66_66MHz,         0},
273 	{       50000000,   SPI_SPD7,   F_50MHz},
274 	{       33330000, F_33_33MHz,         0},
275 	{       22220000, F_22_22MHz,         0},
276 	{       16660000, F_16_66MHz,         0},
277 	{        4000000,   SPI_SPD7,    F_4MHz},
278 	{        3170000,   SPI_SPD7, F_3_17MHz},
279 	{ AMD_SPI_MIN_HZ,   F_800KHz,         0},
280 };
281 
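/*
 * amd_spi_freq[] is ordered fastest to slowest; the lookup below picks the
 * first (fastest) entry that does not exceed the requested rate and falls
 * back to AMD_SPI_MIN_HZ for anything slower.
 */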
282 static void amd_set_spi_freq(struct amd_spi *amd_spi, u32 speed_hz)
283 {
284 	unsigned int i, spd7_val, alt_spd;
285 
286 	for (i = 0; i < ARRAY_SIZE(amd_spi_freq)-1; i++)
287 		if (speed_hz >= amd_spi_freq[i].speed_hz)
288 			break;
289 
290 	if (amd_spi->speed_hz == amd_spi_freq[i].speed_hz)
291 		return;
292 
293 	amd_spi->speed_hz = amd_spi_freq[i].speed_hz;
294 
295 	alt_spd = (amd_spi_freq[i].enable_val << AMD_SPI_ALT_SPD_SHIFT)
296 		   & AMD_SPI_ALT_SPD_MASK;
297 	amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, alt_spd,
298 			       AMD_SPI_ALT_SPD_MASK);
299 
300 	if (amd_spi->speed_hz == AMD_SPI_MAX_HZ)
301 		amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, 1,
302 				       AMD_SPI_SPI100_MASK);
303 
304 	if (amd_spi_freq[i].spd7_val) {
305 		spd7_val = (amd_spi_freq[i].spd7_val << AMD_SPI_SPD7_SHIFT)
306 			    & AMD_SPI_SPD7_MASK;
307 		amd_spi_setclear_reg32(amd_spi, AMD_SPI_SPEED_REG, spd7_val,
308 				       AMD_SPI_SPD7_MASK);
309 	}
310 }
311 
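/*
 * Half-duplex FIFO transfer: the first byte of the first TX transfer is
 * consumed as the command opcode, the remaining TX bytes are loaded into the
 * FIFO at AMD_SPI_FIFO_BASE, and after the command executes the RX bytes are
 * read back from the FIFO positions that follow the TX data.
 */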
312 static inline int amd_spi_fifo_xfer(struct amd_spi *amd_spi,
313 				    struct spi_controller *host,
314 				    struct spi_message *message)
315 {
316 	struct spi_transfer *xfer = NULL;
317 	struct spi_device *spi = message->spi;
318 	u8 cmd_opcode = 0, fifo_pos = AMD_SPI_FIFO_BASE;
319 	u8 *buf = NULL;
320 	u32 i = 0;
321 	u32 tx_len = 0, rx_len = 0;
322 
323 	list_for_each_entry(xfer, &message->transfers,
324 			    transfer_list) {
325 		if (xfer->speed_hz)
326 			amd_set_spi_freq(amd_spi, xfer->speed_hz);
327 		else
328 			amd_set_spi_freq(amd_spi, spi->max_speed_hz);
329 
330 		if (xfer->tx_buf) {
331 			buf = (u8 *)xfer->tx_buf;
332 			if (!tx_len) {
333 				cmd_opcode = *(u8 *)xfer->tx_buf;
334 				buf++;
335 				xfer->len--;
336 			}
337 			tx_len += xfer->len;
338 
339 			/* Write data into the FIFO. */
340 			for (i = 0; i < xfer->len; i++)
341 				amd_spi_writereg8(amd_spi, fifo_pos + i, buf[i]);
342 
343 			fifo_pos += xfer->len;
344 		}
345 
346 		/* Store no. of bytes to be received from FIFO */
347 		if (xfer->rx_buf)
348 			rx_len += xfer->len;
349 	}
350 
351 	if (!buf) {
352 		message->status = -EINVAL;
353 		goto fin_msg;
354 	}
355 
356 	amd_spi_set_opcode(amd_spi, cmd_opcode);
357 	amd_spi_set_tx_count(amd_spi, tx_len);
358 	amd_spi_set_rx_count(amd_spi, rx_len);
359 
360 	/* Execute command */
361 	message->status = amd_spi_execute_opcode(amd_spi);
362 	if (message->status)
363 		goto fin_msg;
364 
365 	if (rx_len) {
366 		message->status = amd_spi_busy_wait(amd_spi);
367 		if (message->status)
368 			goto fin_msg;
369 
370 		list_for_each_entry(xfer, &message->transfers, transfer_list)
371 			if (xfer->rx_buf) {
372 				buf = (u8 *)xfer->rx_buf;
373 				/* Read data from FIFO to receive buffer */
374 				for (i = 0; i < xfer->len; i++)
375 					buf[i] = amd_spi_readreg8(amd_spi, fifo_pos + i);
376 				fifo_pos += xfer->len;
377 			}
378 	}
379 
380 	/* Update statistics */
381 	message->actual_length = tx_len + rx_len + 1;
382 
383 fin_msg:
384 	switch (amd_spi->version) {
385 	case AMD_SPI_V1:
386 		break;
387 	case AMD_SPI_V2:
388 	case AMD_HID2_SPI:
389 		amd_spi_clear_chip(amd_spi, spi_get_chipselect(message->spi, 0));
390 		break;
391 	default:
392 		return -ENODEV;
393 	}
394 
395 	spi_finalize_current_message(host);
396 
397 	return message->status;
398 }
399 
400 static inline bool amd_is_spi_read_cmd_4b(const u16 op)
401 {
402 	switch (op) {
403 	case AMD_SPI_OP_READ_FAST_4B:
404 	case AMD_SPI_OP_READ_1_1_2_4B:
405 	case AMD_SPI_OP_READ_1_2_2_4B:
406 	case AMD_SPI_OP_READ_1_1_4_4B:
407 	case AMD_SPI_OP_READ_1_4_4_4B:
408 		return true;
409 	default:
410 		return false;
411 	}
412 }
413 
414 static inline bool amd_is_spi_read_cmd(const u16 op)
415 {
416 	switch (op) {
417 	case AMD_SPI_OP_READ:
418 	case AMD_SPI_OP_READ_FAST:
419 	case AMD_SPI_OP_READ_1_1_2:
420 	case AMD_SPI_OP_READ_1_2_2:
421 	case AMD_SPI_OP_READ_1_1_4:
422 	case AMD_SPI_OP_READ_1_4_4:
423 		return true;
424 	default:
425 		return amd_is_spi_read_cmd_4b(op);
426 	}
427 }
428 
429 static inline bool amd_is_spi_write_cmd(const u16 op)
430 {
431 	switch (op) {
432 	case AMD_SPI_OP_PP:
433 	case AMD_SPI_OP_PP_RANDOM:
434 		return true;
435 	default:
436 		return false;
437 	}
438 }
439 
440 static bool amd_spi_supports_op(struct spi_mem *mem,
441 				const struct spi_mem_op *op)
442 {
443 	struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);
444 
445 	/* bus width is number of IO lines used to transmit */
446 	if (op->cmd.buswidth > 1 || op->addr.buswidth > 4)
447 		return false;
448 
449 	/* AMD SPI controllers support quad mode only for read operations */
450 	if (amd_is_spi_read_cmd(op->cmd.opcode) || amd_is_spi_write_cmd(op->cmd.opcode)) {
451 		if (op->data.buswidth > 4)
452 			return false;
453 
454 		/*
455 		 * The HID2 SPI controller supports DMA reads of up to 4K bytes
456 		 * and does not support 4-byte address commands.
457 		 */
458 		if (amd_spi->version == AMD_HID2_SPI) {
459 			if ((amd_is_spi_read_cmd_4b(op->cmd.opcode) ||
460 			     amd_is_spi_write_cmd(op->cmd.opcode)) &&
461 			    op->data.nbytes > AMD_SPI_HID2_DMA_SIZE)
462 				return false;
463 		} else if (op->data.nbytes > AMD_SPI_MAX_DATA) {
464 			return false;
465 		}
466 	} else if (op->data.buswidth > 1 || op->data.nbytes > AMD_SPI_MAX_DATA) {
467 		return false;
468 	}
469 
470 	if (op->max_freq < mem->spi->controller->min_speed_hz)
471 		return false;
472 
473 	return spi_mem_default_supports_op(mem, op);
474 }
475 
476 static int amd_spi_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
477 {
478 	struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);
479 
480 	/*
481 	 * The HID2 SPI controller DMA read mode supports reading up to 4K
482 	 * bytes in a single transaction, whereas SPI0 and the HID2 SPI
483 	 * controller index mode support a maximum of 64 bytes in a single
484 	 * transaction.
485 	 */
486 	if (amd_spi->version == AMD_HID2_SPI && (amd_is_spi_read_cmd(op->cmd.opcode) ||
487 						 amd_is_spi_write_cmd(op->cmd.opcode)))
488 		op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_HID2_DMA_SIZE);
489 	else
490 		op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_MAX_DATA);
491 
492 	return 0;
493 }
494 
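/*
 * Write the address into the FIFO most-significant byte first. The opcode is
 * latched through amd_spi_set_opcode() rather than the FIFO, so the address
 * occupies the FIFO from AMD_SPI_FIFO_BASE onwards.
 */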
495 static void amd_spi_set_addr(struct amd_spi *amd_spi,
496 			     const struct spi_mem_op *op)
497 {
498 	u8 nbytes = op->addr.nbytes;
499 	u64 addr_val = op->addr.val;
500 	int base_addr, i;
501 
502 	base_addr = AMD_SPI_FIFO_BASE + nbytes;
503 
504 	for (i = 0; i < nbytes; i++) {
505 		amd_spi_writereg8(amd_spi, base_addr - i - 1, addr_val &
506 				  GENMASK(7, 0));
507 		addr_val >>= 8;
508 	}
509 }
510 
511 static void amd_spi_hiddma_write(struct amd_spi *amd_spi, const struct spi_mem_op *op)
512 {
513 	u16 hid_cmd_start, val;
514 	u32 hid_regval;
515 
516 	/*
517 	 * Program the HID2 output Buffer0. 4k aligned buf_memory_addr[31:12],
518 	 * buf_size[2:0].
519 	 */
520 	hid_regval = amd_spi->phy_dma_buf | BIT(0);
521 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_OUTPUT_BUF0, hid_regval);
522 
523 	/* Program max write length in hid2_write_control1 register */
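	/*
	 * The extra 3 bytes presumably cover the opcode and the 2-byte
	 * address that amd_spi_mem_data_out() places ahead of the data in
	 * the DMA buffer.
	 */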
524 	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_WRITE_CNTRL1);
525 	hid_regval = (hid_regval & ~GENMASK(15, 0)) | ((op->data.nbytes) + 3);
526 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_WRITE_CNTRL1, hid_regval);
527 
528 	/* Set cmd start bit in hid2_cmd_start register to trigger HID basic write operation */
529 	hid_cmd_start = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_CMD_START);
530 	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_CMD_START, (hid_cmd_start | BIT(2)));
531 
532 	/* Check interrupt status of HIDDMA basic write operation in hid2_int_status register */
533 	readw_poll_timeout(amd_spi->io_remap_addr + AMD_SPI_HID2_INT_STATUS, val,
534 			   (val & BIT(2)), AMD_SPI_IO_SLEEP_US, AMD_SPI_IO_TIMEOUT_US);
535 
536 	/* Clear the interrupts by writing to hid2_int_status register */
537 	val = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_INT_STATUS);
538 	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_INT_STATUS, val);
539 }
540 
541 static void amd_spi_mem_data_out(struct amd_spi *amd_spi,
542 				 const struct spi_mem_op *op)
543 {
544 	int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
545 	u64 *buf_64 = (u64 *)op->data.buf.out;
546 	u64 addr_val = op->addr.val;
547 	u32 nbytes = op->data.nbytes;
548 	u32 left_data = nbytes;
549 	u8 *buf;
550 	int i;
551 
552 	/*
553 	 * Use HID write mode only when writing complete page data;
554 	 * fall back to index mode otherwise.
555 	 */
556 	if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_write_cmd(op->cmd.opcode)) {
557 		u64 *dma_buf64 = (u64 *)(amd_spi->dma_virt_addr + op->addr.nbytes + op->cmd.nbytes);
558 		u8 *dma_buf = (u8 *)amd_spi->dma_virt_addr;
559 
560 		/* Copy opcode and address to DMA buffer */
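		/*
		 * The DMA buffer mirrors the on-wire ordering: opcode at
		 * offset 0, address bytes next (most-significant byte first),
		 * followed by the payload.
		 */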
561 		*dma_buf = op->cmd.opcode;
562 
563 		dma_buf = (u8 *)dma_buf64;
564 		for (i = 0; i < op->addr.nbytes; i++) {
565 			*--dma_buf = addr_val & GENMASK(7, 0);
566 			addr_val >>= 8;
567 		}
568 
569 		/* Copy data to DMA buffer */
570 		while (left_data >= 8) {
571 			*dma_buf64++ = *buf_64++;
572 			left_data -= 8;
573 		}
574 
575 		buf = (u8 *)buf_64;
576 		dma_buf = (u8 *)dma_buf64;
577 		while (left_data--)
578 			*dma_buf++ = *buf++;
579 
580 		amd_spi_hiddma_write(amd_spi, op);
581 	} else {
582 		amd_spi_set_opcode(amd_spi, op->cmd.opcode);
583 		amd_spi_set_addr(amd_spi, op);
584 
585 		for (i = 0; left_data >= 8; i++, left_data -= 8)
586 			amd_spi_writereg64(amd_spi, base_addr + op->dummy.nbytes + (i * 8),
587 					   *buf_64++);
588 
589 		buf = (u8 *)buf_64;
590 		for (i = 0; i < left_data; i++) {
591 			amd_spi_writereg8(amd_spi,
592 					  base_addr + op->dummy.nbytes + nbytes + i - left_data,
593 					  buf[i]);
594 		}
595 
596 		amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->data.nbytes);
597 		amd_spi_set_rx_count(amd_spi, 0);
598 		amd_spi_clear_fifo_ptr(amd_spi);
599 		amd_spi_execute_opcode(amd_spi);
600 	}
601 }
602 
603 static void amd_spi_hiddma_read(struct amd_spi *amd_spi, const struct spi_mem_op *op)
604 {
605 	u16 hid_cmd_start, val;
606 	u32 hid_regval;
607 
608 	/* Set the opcode in hid2_read_control0 register */
609 	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0);
610 	hid_regval = (hid_regval & ~GENMASK(7, 0)) | op->cmd.opcode;
611 
612 	/*
613 	 * Program the address in the hid2_read_control0 register [8:31]. The address should
614 	 * be written starting from the 8th bit of the register, requiring an 8-bit shift.
615 	 * Additionally, to convert a 2-byte spinand address to a 3-byte address, another
616 	 * 8-bit shift is needed. Therefore, a total shift of 16 bits is required.
617 	 */
618 	hid_regval = (hid_regval & ~GENMASK(31, 8)) | (op->addr.val << 16);
619 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0, hid_regval);
620 
621 	/* Configure dummy clock cycles for fast read, dual, quad I/O commands */
622 	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2);
623 	/* Fast read dummy cycle */
624 	hid_regval &= ~GENMASK(4, 0);
625 
626 	/* Fast read Dual I/O dummy cycle */
627 	hid_regval &= ~GENMASK(12, 8);
628 
629 	/* Fast read Quad I/O dummy cycle */
630 	hid_regval = (hid_regval & ~GENMASK(20, 16)) | BIT(17);
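	/*
	 * BIT(17) leaves the value 2 in bits [20:16], presumably two dummy
	 * clock cycles for Quad I/O reads.
	 */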
631 
632 	/* Set the preamble byte count to zero */
633 	hid_regval &= ~GENMASK(27, 24);
634 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2, hid_regval);
635 
636 	/*
637 	 * Program the HID2 Input Ring Buffer0. 4k aligned buf_memory_addr[31:12],
638 	 * buf_size[4:0], end_input_ring[5].
639 	 */
640 	hid_regval = amd_spi->phy_dma_buf | BIT(5) | BIT(0);
641 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INPUT_RING_BUF0, hid_regval);
642 
643 	/* Program the max read length (number of DWORDs) in the hid2_read_control1 register */
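	/* The field is encoded as the DWORD count minus one, hence the divide by 4 and the decrement. */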
644 	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1);
645 	hid_regval = (hid_regval & ~GENMASK(15, 0)) | ((op->data.nbytes / 4) - 1);
646 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1, hid_regval);
647 
648 	/* Set cmd start bit in hid2_cmd_start register to trigger HID basic read operation */
649 	hid_cmd_start = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_CMD_START);
650 	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_CMD_START, (hid_cmd_start | BIT(3)));
651 
652 	/* Check interrupt status of HIDDMA basic read operation in hid2_int_status register */
653 	readw_poll_timeout(amd_spi->io_remap_addr + AMD_SPI_HID2_INT_STATUS, val,
654 			   (val & BIT(3)), AMD_SPI_IO_SLEEP_US, AMD_SPI_IO_TIMEOUT_US);
655 
656 	/* Clear the interrupts by writing to hid2_int_status register */
657 	val = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_INT_STATUS);
658 	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_INT_STATUS, val);
659 }
660 
661 static void amd_spi_mem_data_in(struct amd_spi *amd_spi,
662 				const struct spi_mem_op *op)
663 {
664 	int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
665 	u64 *buf_64 = (u64 *)op->data.buf.in;
666 	u32 nbytes = op->data.nbytes;
667 	u32 left_data = nbytes;
668 	u32 data;
669 	u8 *buf;
670 	int i;
671 
672 	/*
673 	 * Use HID read mode only when reading complete page data;
674 	 * fall back to index mode otherwise.
675 	 */
676 	if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode)) {
677 		u64 *dma_buf64 = (u64 *)amd_spi->dma_virt_addr;
678 		u8 *dma_buf;
679 
680 		amd_spi_hiddma_read(amd_spi, op);
681 
682 		/* Copy data from DMA buffer */
683 		while (left_data >= 8) {
684 			*buf_64++ = *dma_buf64++;
685 			left_data -= 8;
686 		}
687 
688 		buf = (u8 *)buf_64;
689 		dma_buf = (u8 *)dma_buf64;
690 		while (left_data--)
691 			*buf++ = *dma_buf++;
692 
693 		/* Reset HID RX memory logic */
694 		data = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
695 		amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, data | BIT(5));
696 	} else {
697 		/* Index mode */
698 		amd_spi_set_opcode(amd_spi, op->cmd.opcode);
699 		amd_spi_set_addr(amd_spi, op);
700 		amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->dummy.nbytes);
701 
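		/* Transmit 0xff filler bytes for the dummy cycles that follow the address. */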
702 		for (i = 0; i < op->dummy.nbytes; i++)
703 			amd_spi_writereg8(amd_spi, (base_addr + i), 0xff);
704 
705 		amd_spi_set_rx_count(amd_spi, op->data.nbytes);
706 		amd_spi_clear_fifo_ptr(amd_spi);
707 		amd_spi_execute_opcode(amd_spi);
708 		amd_spi_busy_wait(amd_spi);
709 
710 		for (i = 0; left_data >= 8; i++, left_data -= 8)
711 			*buf_64++ = amd_spi_readreg64(amd_spi, base_addr + op->dummy.nbytes +
712 						      (i * 8));
713 
714 		buf = (u8 *)buf_64;
715 		for (i = 0; i < left_data; i++)
716 			buf[i] = amd_spi_readreg8(amd_spi, base_addr + op->dummy.nbytes +
717 						  nbytes + i - left_data);
718 	}
719 
720 }
721 
722 static void amd_set_spi_addr_mode(struct amd_spi *amd_spi,
723 				  const struct spi_mem_op *op)
724 {
725 	u32 val = amd_spi_readreg32(amd_spi, AMD_SPI_ADDR32CTRL_REG);
726 
727 	if (amd_is_spi_read_cmd_4b(op->cmd.opcode))
728 		amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val | BIT(0));
729 	else
730 		amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val & ~BIT(0));
731 }
732 
733 static int amd_spi_exec_mem_op(struct spi_mem *mem,
734 			       const struct spi_mem_op *op)
735 {
736 	struct amd_spi *amd_spi;
737 
738 	amd_spi = spi_controller_get_devdata(mem->spi->controller);
739 
740 	amd_set_spi_freq(amd_spi, op->max_freq);
741 
742 	if (amd_spi->version == AMD_SPI_V2)
743 		amd_set_spi_addr_mode(amd_spi, op);
744 
745 	switch (op->data.dir) {
746 	case SPI_MEM_DATA_IN:
747 		amd_spi_mem_data_in(amd_spi, op);
748 		break;
749 	case SPI_MEM_DATA_OUT:
750 		fallthrough;
751 	case SPI_MEM_NO_DATA:
752 		amd_spi_mem_data_out(amd_spi, op);
753 		break;
754 	default:
755 		return -EOPNOTSUPP;
756 	}
757 
758 	return 0;
759 }
760 
761 static const struct spi_controller_mem_ops amd_spi_mem_ops = {
762 	.exec_op = amd_spi_exec_mem_op,
763 	.adjust_op_size = amd_spi_adjust_op_size,
764 	.supports_op = amd_spi_supports_op,
765 };
766 
767 static const struct spi_controller_mem_caps amd_spi_mem_caps = {
768 	.per_op_freq = true,
769 };
770 
771 static int amd_spi_host_transfer(struct spi_controller *host,
772 				   struct spi_message *msg)
773 {
774 	struct amd_spi *amd_spi = spi_controller_get_devdata(host);
775 	struct spi_device *spi = msg->spi;
776 
777 	amd_spi_select_chip(amd_spi, spi_get_chipselect(spi, 0));
778 
779 	/*
780 	 * Extract spi_transfers from the spi message and
781 	 * program the controller.
782 	 */
783 	return amd_spi_fifo_xfer(amd_spi, host, msg);
784 }
785 
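/*
 * Both the per-transfer and the per-message limit are capped at the 70-byte
 * command FIFO, since amd_spi_fifo_xfer() stages an entire message in the
 * FIFO at once.
 */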
786 static size_t amd_spi_max_transfer_size(struct spi_device *spi)
787 {
788 	return AMD_SPI_FIFO_SIZE;
789 }
790 
791 static int amd_spi_setup_hiddma(struct amd_spi *amd_spi, struct device *dev)
792 {
793 	u32 hid_regval;
794 
795 	/* Allocate DMA buffer to use for HID basic read and write operations. For write
796 	 * operations, the DMA buffer should include the opcode, address bytes and dummy
797 	 * bytes (if any) in addition to the data bytes. Additionally, the hardware requires
798 	 * that the buffer address be 4K aligned. So, allocate DMA buffer of size
799 	 * 2 * AMD_SPI_HID2_DMA_SIZE.
800 	 */
801 	amd_spi->dma_virt_addr = dmam_alloc_coherent(dev, AMD_SPI_HID2_DMA_SIZE * 2,
802 						     &amd_spi->phy_dma_buf, GFP_KERNEL);
803 	if (!amd_spi->dma_virt_addr)
804 		return -ENOMEM;
805 
806 	/*
807 	 * Enable interrupts and set mask bits in hid2_int_mask register to generate interrupt
808 	 * properly for HIDDMA basic read and write operations.
809 	 */
810 	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_INT_MASK);
811 	hid_regval = (hid_regval & GENMASK(31, 8)) | BIT(18) | BIT(19);
812 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INT_MASK, hid_regval);
813 
814 	/* Configure the buffer unit (4K) and write threshold in the hid2_control register */
815 	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
816 	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, (hid_regval | GENMASK(13, 12)) & ~BIT(3));
817 
818 	return 0;
819 }
820 
821 int amd_spi_probe_common(struct device *dev, struct spi_controller *host)
822 {
823 	struct amd_spi *amd_spi = spi_controller_get_devdata(host);
824 	int err;
825 
826 	/* Initialize the spi_controller fields */
827 	host->num_chipselect = 4;
828 	host->mode_bits = SPI_TX_DUAL | SPI_TX_QUAD | SPI_RX_DUAL | SPI_RX_QUAD;
829 	host->flags = SPI_CONTROLLER_HALF_DUPLEX;
830 	host->max_speed_hz = AMD_SPI_MAX_HZ;
831 	host->min_speed_hz = AMD_SPI_MIN_HZ;
832 	host->setup = amd_spi_host_setup;
833 	host->transfer_one_message = amd_spi_host_transfer;
834 	host->mem_ops = &amd_spi_mem_ops;
835 	host->mem_caps = &amd_spi_mem_caps;
836 	host->max_transfer_size = amd_spi_max_transfer_size;
837 	host->max_message_size = amd_spi_max_transfer_size;
838 
839 	/* Register the controller with SPI framework */
840 	err = devm_spi_register_controller(dev, host);
841 	if (err)
842 		return dev_err_probe(dev, err, "error registering SPI controller\n");
843 
844 	if (amd_spi->version == AMD_HID2_SPI)
845 		err = amd_spi_setup_hiddma(amd_spi, dev);
846 
847 	return err;
848 }
849 EXPORT_SYMBOL_GPL(amd_spi_probe_common);
850 
851 static int amd_spi_probe(struct platform_device *pdev)
852 {
853 	struct device *dev = &pdev->dev;
854 	struct spi_controller *host;
855 	struct amd_spi *amd_spi;
856 
857 	/* Allocate storage for host and driver private data */
858 	host = devm_spi_alloc_host(dev, sizeof(struct amd_spi));
859 	if (!host)
860 		return dev_err_probe(dev, -ENOMEM, "Error allocating SPI host\n");
861 
862 	amd_spi = spi_controller_get_devdata(host);
863 	amd_spi->io_remap_addr = devm_platform_ioremap_resource(pdev, 0);
864 	if (IS_ERR(amd_spi->io_remap_addr))
865 		return dev_err_probe(dev, PTR_ERR(amd_spi->io_remap_addr),
866 				     "ioremap of SPI registers failed\n");
867 
868 	dev_dbg(dev, "io_remap_address: %p\n", amd_spi->io_remap_addr);
869 
870 	amd_spi->version = (uintptr_t)device_get_match_data(dev);
871 	host->bus_num = 0;
872 
873 	return amd_spi_probe_common(dev, host);
874 }
875 
876 #ifdef CONFIG_ACPI
877 static const struct acpi_device_id spi_acpi_match[] = {
878 	{ "AMDI0061", AMD_SPI_V1 },
879 	{ "AMDI0062", AMD_SPI_V2 },
880 	{ "AMDI0063", AMD_HID2_SPI },
881 	{},
882 };
883 MODULE_DEVICE_TABLE(acpi, spi_acpi_match);
884 #endif
885 
886 static struct platform_driver amd_spi_driver = {
887 	.driver = {
888 		.name = "amd_spi",
889 		.acpi_match_table = ACPI_PTR(spi_acpi_match),
890 	},
891 	.probe = amd_spi_probe,
892 };
893 
894 module_platform_driver(amd_spi_driver);
895 
896 MODULE_LICENSE("Dual BSD/GPL");
897 MODULE_AUTHOR("Sanjay Mehta <sanju.mehta@amd.com>");
898 MODULE_DESCRIPTION("AMD SPI Master Controller Driver");
899