1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2024 AIROHA Inc
4  * Author: Lorenzo Bianconi <lorenzo@kernel.org>
5  * Author: Ray Liu <ray.liu@airoha.com>
6  */
7 
8 #include <linux/bitfield.h>
9 #include <linux/clk.h>
10 #include <linux/delay.h>
11 #include <linux/device.h>
12 #include <linux/dma-mapping.h>
13 #include <linux/errno.h>
14 #include <linux/limits.h>
15 #include <linux/math.h>
16 #include <linux/minmax.h>
17 #include <linux/module.h>
18 #include <linux/mutex.h>
19 #include <linux/platform_device.h>
20 #include <linux/property.h>
21 #include <linux/regmap.h>
22 #include <linux/sizes.h>
23 #include <linux/spi/spi.h>
24 #include <linux/spi/spi-mem.h>
25 #include <linux/types.h>
26 #include <linux/unaligned.h>
27 
28 /* SPI */
29 #define REG_SPI_CTRL_BASE			0x1FA10000
30 
31 #define REG_SPI_CTRL_READ_MODE			0x0000
32 #define REG_SPI_CTRL_READ_IDLE_EN		0x0004
33 #define REG_SPI_CTRL_SIDLY			0x0008
34 #define REG_SPI_CTRL_CSHEXT			0x000c
35 #define REG_SPI_CTRL_CSLEXT			0x0010
36 
37 #define REG_SPI_CTRL_MTX_MODE_TOG		0x0014
38 #define SPI_CTRL_MTX_MODE_TOG			GENMASK(3, 0)
39 
40 #define REG_SPI_CTRL_RDCTL_FSM			0x0018
41 #define SPI_CTRL_RDCTL_FSM			GENMASK(3, 0)
42 
43 #define REG_SPI_CTRL_MACMUX_SEL			0x001c
44 
45 #define REG_SPI_CTRL_MANUAL_EN			0x0020
46 #define SPI_CTRL_MANUAL_EN			BIT(0)
47 
48 #define REG_SPI_CTRL_OPFIFO_EMPTY		0x0024
49 #define SPI_CTRL_OPFIFO_EMPTY			BIT(0)
50 
51 #define REG_SPI_CTRL_OPFIFO_WDATA		0x0028
52 #define SPI_CTRL_OPFIFO_LEN			GENMASK(8, 0)
53 #define SPI_CTRL_OPFIFO_OP			GENMASK(13, 9)
54 
55 #define REG_SPI_CTRL_OPFIFO_FULL		0x002c
56 #define SPI_CTRL_OPFIFO_FULL			BIT(0)
57 
58 #define REG_SPI_CTRL_OPFIFO_WR			0x0030
59 #define SPI_CTRL_OPFIFO_WR			BIT(0)
60 
61 #define REG_SPI_CTRL_DFIFO_FULL			0x0034
62 #define SPI_CTRL_DFIFO_FULL			BIT(0)
63 
64 #define REG_SPI_CTRL_DFIFO_WDATA		0x0038
65 #define SPI_CTRL_DFIFO_WDATA			GENMASK(7, 0)
66 
67 #define REG_SPI_CTRL_DFIFO_EMPTY		0x003c
68 #define SPI_CTRL_DFIFO_EMPTY			BIT(0)
69 
70 #define REG_SPI_CTRL_DFIFO_RD			0x0040
71 #define SPI_CTRL_DFIFO_RD			BIT(0)
72 
73 #define REG_SPI_CTRL_DFIFO_RDATA		0x0044
74 #define SPI_CTRL_DFIFO_RDATA			GENMASK(7, 0)
75 
76 #define REG_SPI_CTRL_DUMMY			0x0080
77 #define SPI_CTRL_CTRL_DUMMY			GENMASK(3, 0)
78 
79 #define REG_SPI_CTRL_PROBE_SEL			0x0088
80 #define REG_SPI_CTRL_INTERRUPT			0x0090
81 #define REG_SPI_CTRL_INTERRUPT_EN		0x0094
82 #define REG_SPI_CTRL_SI_CK_SEL			0x009c
83 #define REG_SPI_CTRL_SW_CFGNANDADDR_VAL		0x010c
84 #define REG_SPI_CTRL_SW_CFGNANDADDR_EN		0x0110
85 #define REG_SPI_CTRL_SFC_STRAP			0x0114
86 
87 #define REG_SPI_CTRL_NFI2SPI_EN			0x0130
88 #define SPI_CTRL_NFI2SPI_EN			BIT(0)
89 
90 /* NFI2SPI */
91 #define REG_SPI_NFI_CNFG			0x0000
92 #define SPI_NFI_DMA_MODE			BIT(0)
93 #define SPI_NFI_READ_MODE			BIT(1)
94 #define SPI_NFI_DMA_BURST_EN			BIT(2)
95 #define SPI_NFI_HW_ECC_EN			BIT(8)
96 #define SPI_NFI_AUTO_FDM_EN			BIT(9)
97 #define SPI_NFI_OPMODE				GENMASK(14, 12)
98 
99 #define REG_SPI_NFI_PAGEFMT			0x0004
100 #define SPI_NFI_PAGE_SIZE			GENMASK(1, 0)
101 #define SPI_NFI_SPARE_SIZE			GENMASK(5, 4)
102 
103 #define REG_SPI_NFI_CON				0x0008
104 #define SPI_NFI_FIFO_FLUSH			BIT(0)
105 #define SPI_NFI_RST				BIT(1)
106 #define SPI_NFI_RD_TRIG				BIT(8)
107 #define SPI_NFI_WR_TRIG				BIT(9)
108 #define SPI_NFI_SEC_NUM				GENMASK(15, 12)
109 
110 #define REG_SPI_NFI_INTR_EN			0x0010
111 #define SPI_NFI_RD_DONE_EN			BIT(0)
112 #define SPI_NFI_WR_DONE_EN			BIT(1)
113 #define SPI_NFI_RST_DONE_EN			BIT(2)
114 #define SPI_NFI_ERASE_DONE_EN			BIT(3)
115 #define SPI_NFI_BUSY_RETURN_EN			BIT(4)
116 #define SPI_NFI_ACCESS_LOCK_EN			BIT(5)
117 #define SPI_NFI_AHB_DONE_EN			BIT(6)
118 #define SPI_NFI_ALL_IRQ_EN					\
119 	(SPI_NFI_RD_DONE_EN | SPI_NFI_WR_DONE_EN |		\
120 	 SPI_NFI_RST_DONE_EN | SPI_NFI_ERASE_DONE_EN |		\
121 	 SPI_NFI_BUSY_RETURN_EN | SPI_NFI_ACCESS_LOCK_EN |	\
122 	 SPI_NFI_AHB_DONE_EN)
123 
124 #define REG_SPI_NFI_INTR			0x0014
125 #define SPI_NFI_AHB_DONE			BIT(6)
126 
127 #define REG_SPI_NFI_CMD				0x0020
128 
129 #define REG_SPI_NFI_ADDR_NOB			0x0030
130 #define SPI_NFI_ROW_ADDR_NOB			GENMASK(6, 4)
131 
132 #define REG_SPI_NFI_STA				0x0060
133 #define REG_SPI_NFI_FIFOSTA			0x0064
134 #define REG_SPI_NFI_STRADDR			0x0080
135 #define REG_SPI_NFI_FDM0L			0x00a0
136 #define REG_SPI_NFI_FDM0M			0x00a4
137 #define REG_SPI_NFI_FDM7L			0x00d8
138 #define REG_SPI_NFI_FDM7M			0x00dc
139 #define REG_SPI_NFI_FIFODATA0			0x0190
140 #define REG_SPI_NFI_FIFODATA1			0x0194
141 #define REG_SPI_NFI_FIFODATA2			0x0198
142 #define REG_SPI_NFI_FIFODATA3			0x019c
143 #define REG_SPI_NFI_MASTERSTA			0x0224
144 
145 #define REG_SPI_NFI_SECCUS_SIZE			0x022c
146 #define SPI_NFI_CUS_SEC_SIZE			GENMASK(12, 0)
147 #define SPI_NFI_CUS_SEC_SIZE_EN			BIT(16)
148 
149 #define REG_SPI_NFI_RD_CTL2			0x0510
150 #define SPI_NFI_DATA_READ_CMD			GENMASK(7, 0)
151 
152 #define REG_SPI_NFI_RD_CTL3			0x0514
153 
154 #define REG_SPI_NFI_PG_CTL1			0x0524
155 #define SPI_NFI_PG_LOAD_CMD			GENMASK(15, 8)
156 
157 #define REG_SPI_NFI_PG_CTL2			0x0528
158 #define REG_SPI_NFI_NOR_PROG_ADDR		0x052c
159 #define REG_SPI_NFI_NOR_RD_ADDR			0x0534
160 
161 #define REG_SPI_NFI_SNF_MISC_CTL		0x0538
162 #define SPI_NFI_DATA_READ_WR_MODE		GENMASK(18, 16)
163 
164 #define REG_SPI_NFI_SNF_MISC_CTL2		0x053c
165 #define SPI_NFI_READ_DATA_BYTE_NUM		GENMASK(12, 0)
166 #define SPI_NFI_PROG_LOAD_BYTE_NUM		GENMASK(28, 16)
167 
168 #define REG_SPI_NFI_SNF_STA_CTL1		0x0550
169 #define SPI_NFI_READ_FROM_CACHE_DONE		BIT(25)
170 #define SPI_NFI_LOAD_TO_CACHE_DONE		BIT(26)
171 
172 #define REG_SPI_NFI_SNF_STA_CTL2		0x0554
173 
174 #define REG_SPI_NFI_SNF_NFI_CNFG		0x055c
175 #define SPI_NFI_SPI_MODE			BIT(0)
176 
177 /* SPI NAND Protocol OP */
178 #define SPI_NAND_OP_GET_FEATURE			0x0f
179 #define SPI_NAND_OP_SET_FEATURE			0x1f
180 #define SPI_NAND_OP_PAGE_READ			0x13
181 #define SPI_NAND_OP_READ_FROM_CACHE_SINGLE	0x03
182 #define SPI_NAND_OP_READ_FROM_CACHE_SINGLE_FAST	0x0b
183 #define SPI_NAND_OP_READ_FROM_CACHE_DUAL	0x3b
184 #define SPI_NAND_OP_READ_FROM_CACHE_DUALIO	0xbb
185 #define SPI_NAND_OP_READ_FROM_CACHE_QUAD	0x6b
186 #define SPI_NAND_OP_READ_FROM_CACHE_QUADIO	0xeb
187 #define SPI_NAND_OP_WRITE_ENABLE		0x06
188 #define SPI_NAND_OP_WRITE_DISABLE		0x04
189 #define SPI_NAND_OP_PROGRAM_LOAD_SINGLE		0x02
190 #define SPI_NAND_OP_PROGRAM_LOAD_QUAD		0x32
191 #define SPI_NAND_OP_PROGRAM_LOAD_RANDOM_SINGLE	0x84
192 #define SPI_NAND_OP_PROGRAM_LOAD_RANDOM_QUAD	0x34
193 #define SPI_NAND_OP_PROGRAM_EXECUTE		0x10
194 #define SPI_NAND_OP_READ_ID			0x9f
195 #define SPI_NAND_OP_BLOCK_ERASE			0xd8
196 #define SPI_NAND_OP_RESET			0xff
197 #define SPI_NAND_OP_DIE_SELECT			0xc2
198 
199 /* SNAND FIFO commands */
200 #define SNAND_FIFO_TX_BUSWIDTH_SINGLE		0x08
201 #define SNAND_FIFO_TX_BUSWIDTH_DUAL		0x09
202 #define SNAND_FIFO_TX_BUSWIDTH_QUAD		0x0a
203 #define SNAND_FIFO_RX_BUSWIDTH_SINGLE		0x0c
204 #define SNAND_FIFO_RX_BUSWIDTH_DUAL		0x0e
205 #define SNAND_FIFO_RX_BUSWIDTH_QUAD		0x0f
206 
207 #define SPI_NAND_CACHE_SIZE			(SZ_4K + SZ_256)
208 #define SPI_MAX_TRANSFER_SIZE			511
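/*
 * SPI_NAND_CACHE_SIZE covers a 4 KiB page plus a 256 byte spare area,
 * presumably the largest cache exposed by the supported SPI-NAND parts.
 * SPI_MAX_TRANSFER_SIZE follows from the 9-bit SPI_CTRL_OPFIFO_LEN field
 * (GENMASK(8, 0)), which caps a single op FIFO transfer at 511 bytes.
 */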
209 
210 enum airoha_snand_mode {
211 	SPI_MODE_AUTO,
212 	SPI_MODE_MANUAL,
213 	SPI_MODE_DMA,
214 };
215 
216 enum airoha_snand_cs {
217 	SPI_CHIP_SEL_HIGH,
218 	SPI_CHIP_SEL_LOW,
219 };
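/*
 * The chip select values are pushed through the op FIFO as-is (see
 * airoha_snand_set_cs()), so they are presumably the controller opcodes
 * for de-asserting (0) and asserting (1) the chip select line.
 */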
220 
221 struct airoha_snand_ctrl {
222 	struct device *dev;
223 	struct regmap *regmap_ctrl;
224 	struct regmap *regmap_nfi;
225 	struct clk *spi_clk;
226 };
227 
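/*
 * Queue one operation on the controller op FIFO: write the opcode and
 * transfer length to OPFIFO_WDATA, wait for room in the FIFO, latch the
 * entry with OPFIFO_WR and then wait for the FIFO to drain.
 */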
228 static int airoha_snand_set_fifo_op(struct airoha_snand_ctrl *as_ctrl,
229 				    u8 op_cmd, int op_len)
230 {
231 	int err;
232 	u32 val;
233 
234 	err = regmap_write(as_ctrl->regmap_ctrl, REG_SPI_CTRL_OPFIFO_WDATA,
235 			   FIELD_PREP(SPI_CTRL_OPFIFO_LEN, op_len) |
236 			   FIELD_PREP(SPI_CTRL_OPFIFO_OP, op_cmd));
237 	if (err)
238 		return err;
239 
240 	err = regmap_read_poll_timeout(as_ctrl->regmap_ctrl,
241 				       REG_SPI_CTRL_OPFIFO_FULL,
242 				       val, !(val & SPI_CTRL_OPFIFO_FULL),
243 				       0, 250 * USEC_PER_MSEC);
244 	if (err)
245 		return err;
246 
247 	err = regmap_write(as_ctrl->regmap_ctrl, REG_SPI_CTRL_OPFIFO_WR,
248 			   SPI_CTRL_OPFIFO_WR);
249 	if (err)
250 		return err;
251 
252 	return regmap_read_poll_timeout(as_ctrl->regmap_ctrl,
253 					REG_SPI_CTRL_OPFIFO_EMPTY,
254 					val, (val & SPI_CTRL_OPFIFO_EMPTY),
255 					0, 250 * USEC_PER_MSEC);
256 }
257 
258 static int airoha_snand_set_cs(struct airoha_snand_ctrl *as_ctrl, u8 cs)
259 {
260 	return airoha_snand_set_fifo_op(as_ctrl, cs, sizeof(cs));
261 }
262 
263 static int airoha_snand_write_data_to_fifo(struct airoha_snand_ctrl *as_ctrl,
264 					   const u8 *data, int len)
265 {
266 	int i;
267 
268 	for (i = 0; i < len; i++) {
269 		int err;
270 		u32 val;
271 
272 		/* 1. Wait until dfifo is not full */
273 		err = regmap_read_poll_timeout(as_ctrl->regmap_ctrl,
274 					       REG_SPI_CTRL_DFIFO_FULL, val,
275 					       !(val & SPI_CTRL_DFIFO_FULL),
276 					       0, 250 * USEC_PER_MSEC);
277 		if (err)
278 			return err;
279 
280 		/* 2. Write data to register DFIFO_WDATA */
281 		err = regmap_write(as_ctrl->regmap_ctrl,
282 				   REG_SPI_CTRL_DFIFO_WDATA,
283 				   FIELD_PREP(SPI_CTRL_DFIFO_WDATA, data[i]));
284 		if (err)
285 			return err;
286 
287 		/* 3. Wait until dfifo is not full */
288 		err = regmap_read_poll_timeout(as_ctrl->regmap_ctrl,
289 					       REG_SPI_CTRL_DFIFO_FULL, val,
290 					       !(val & SPI_CTRL_DFIFO_FULL),
291 					       0, 250 * USEC_PER_MSEC);
292 		if (err)
293 			return err;
294 	}
295 
296 	return 0;
297 }
298 
299 static int airoha_snand_read_data_from_fifo(struct airoha_snand_ctrl *as_ctrl,
300 					    u8 *ptr, int len)
301 {
302 	int i;
303 
304 	for (i = 0; i < len; i++) {
305 		int err;
306 		u32 val;
307 
308 		/* 1. wait until dfifo is not empty */
309 		err = regmap_read_poll_timeout(as_ctrl->regmap_ctrl,
310 					       REG_SPI_CTRL_DFIFO_EMPTY, val,
311 					       !(val & SPI_CTRL_DFIFO_EMPTY),
312 					       0, 250 * USEC_PER_MSEC);
313 		if (err)
314 			return err;
315 
316 		/* 2. read from dfifo to register DFIFO_RDATA */
317 		err = regmap_read(as_ctrl->regmap_ctrl,
318 				  REG_SPI_CTRL_DFIFO_RDATA, &val);
319 		if (err)
320 			return err;
321 
322 		ptr[i] = FIELD_GET(SPI_CTRL_DFIFO_RDATA, val);
323 		/* 3. enable register DFIFO_RD to read next byte */
324 		err = regmap_write(as_ctrl->regmap_ctrl,
325 				   REG_SPI_CTRL_DFIFO_RD, SPI_CTRL_DFIFO_RD);
326 		if (err)
327 			return err;
328 	}
329 
330 	return 0;
331 }
332 
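/*
 * Switch ownership of the SPI pads: manual mode lets the CPU drive the
 * op/data FIFOs directly (exec_op path), while DMA mode routes the NFI
 * block to the SPI controller through NFI2SPI_EN (dirmap path). The
 * MTX_MODE_TOG values (9 vs 0) are presumably vendor-defined magic.
 */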
333 static int airoha_snand_set_mode(struct airoha_snand_ctrl *as_ctrl,
334 				 enum airoha_snand_mode mode)
335 {
336 	int err;
337 
338 	switch (mode) {
339 	case SPI_MODE_MANUAL: {
340 		u32 val;
341 
342 		err = regmap_write(as_ctrl->regmap_ctrl,
343 				   REG_SPI_CTRL_NFI2SPI_EN, 0);
344 		if (err)
345 			return err;
346 
347 		err = regmap_write(as_ctrl->regmap_ctrl,
348 				   REG_SPI_CTRL_READ_IDLE_EN, 0);
349 		if (err)
350 			return err;
351 
352 		err = regmap_read_poll_timeout(as_ctrl->regmap_ctrl,
353 					       REG_SPI_CTRL_RDCTL_FSM, val,
354 					       !(val & SPI_CTRL_RDCTL_FSM),
355 					       0, 250 * USEC_PER_MSEC);
356 		if (err)
357 			return err;
358 
359 		err = regmap_write(as_ctrl->regmap_ctrl,
360 				   REG_SPI_CTRL_MTX_MODE_TOG, 9);
361 		if (err)
362 			return err;
363 
364 		err = regmap_write(as_ctrl->regmap_ctrl,
365 				   REG_SPI_CTRL_MANUAL_EN, SPI_CTRL_MANUAL_EN);
366 		if (err)
367 			return err;
368 		break;
369 	}
370 	case SPI_MODE_DMA:
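		/*
		 * Note: SPI_CTRL_MANUAL_EN is written to NFI2SPI_EN below,
		 * but both macros are BIT(0), so this does enable the
		 * NFI-to-SPI bridge as intended.
		 */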
371 		err = regmap_write(as_ctrl->regmap_ctrl,
372 				   REG_SPI_CTRL_NFI2SPI_EN,
373 				   SPI_CTRL_MANUAL_EN);
374 		if (err < 0)
375 			return err;
376 
377 		err = regmap_write(as_ctrl->regmap_ctrl,
378 				   REG_SPI_CTRL_MTX_MODE_TOG, 0x0);
379 		if (err < 0)
380 			return err;
381 
382 		err = regmap_write(as_ctrl->regmap_ctrl,
383 				   REG_SPI_CTRL_MANUAL_EN, 0x0);
384 		if (err < 0)
385 			return err;
386 		break;
387 	case SPI_MODE_AUTO:
388 	default:
389 		break;
390 	}
391 
392 	return regmap_write(as_ctrl->regmap_ctrl, REG_SPI_CTRL_DUMMY, 0);
393 }
394 
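/*
 * PIO TX path: map the spi-mem bus width onto the matching TX FIFO
 * command and push the payload in chunks of at most
 * SPI_MAX_TRANSFER_SIZE bytes, the largest length a single op FIFO
 * entry can describe.
 */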
395 static int airoha_snand_write_data(struct airoha_snand_ctrl *as_ctrl,
396 				   const u8 *data, int len, int buswidth)
397 {
398 	int i, data_len;
399 	u8 cmd;
400 
401 	switch (buswidth) {
402 	case 0:
403 	case 1:
404 		cmd = SNAND_FIFO_TX_BUSWIDTH_SINGLE;
405 		break;
406 	case 2:
407 		cmd = SNAND_FIFO_TX_BUSWIDTH_DUAL;
408 		break;
409 	case 4:
410 		cmd = SNAND_FIFO_TX_BUSWIDTH_QUAD;
411 		break;
412 	default:
413 		return -EINVAL;
414 	}
415 
416 	for (i = 0; i < len; i += data_len) {
417 		int err;
418 
419 		data_len = min(len - i, SPI_MAX_TRANSFER_SIZE);
420 		err = airoha_snand_set_fifo_op(as_ctrl, cmd, data_len);
421 		if (err)
422 			return err;
423 
424 		err = airoha_snand_write_data_to_fifo(as_ctrl, &data[i],
425 						      data_len);
426 		if (err < 0)
427 			return err;
428 	}
429 
430 	return 0;
431 }
432 
433 static int airoha_snand_read_data(struct airoha_snand_ctrl *as_ctrl,
434 				  u8 *data, int len, int buswidth)
435 {
436 	int i, data_len;
437 	u8 cmd;
438 
439 	switch (buswidth) {
440 	case 0:
441 	case 1:
442 		cmd = SNAND_FIFO_RX_BUSWIDTH_SINGLE;
443 		break;
444 	case 2:
445 		cmd = SNAND_FIFO_RX_BUSWIDTH_DUAL;
446 		break;
447 	case 4:
448 		cmd = SNAND_FIFO_RX_BUSWIDTH_QUAD;
449 		break;
450 	default:
451 		return -EINVAL;
452 	}
453 
454 	for (i = 0; i < len; i += data_len) {
455 		int err;
456 
457 		data_len = min(len - i, SPI_MAX_TRANSFER_SIZE);
458 		err = airoha_snand_set_fifo_op(as_ctrl, cmd, data_len);
459 		if (err)
460 			return err;
461 
462 		err = airoha_snand_read_data_from_fifo(as_ctrl, &data[i],
463 						       data_len);
464 		if (err < 0)
465 			return err;
466 	}
467 
468 	return 0;
469 }
470 
471 static int airoha_snand_nfi_init(struct airoha_snand_ctrl *as_ctrl)
472 {
473 	int err;
474 
475 	/* switch to SNFI mode */
476 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_SNF_NFI_CNFG,
477 			   SPI_NFI_SPI_MODE);
478 	if (err)
479 		return err;
480 
481 	/* Keep only the AHB_DONE interrupt enabled; mask all other NFI interrupt sources */
482 	return regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_INTR_EN,
483 				  SPI_NFI_ALL_IRQ_EN, SPI_NFI_AHB_DONE_EN);
484 }
485 
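/*
 * Recognize spi-mem operations that can use the DMA dirmap path: a
 * 2-byte column address, at most 15 dummy cycles for reads, and
 * address/data bus-width combinations the NFI read/program engine can
 * issue.
 */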
486 static bool airoha_snand_is_page_ops(const struct spi_mem_op *op)
487 {
488 	if (op->addr.nbytes != 2)
489 		return false;
490 
491 	if (op->addr.buswidth != 1 && op->addr.buswidth != 2 &&
492 	    op->addr.buswidth != 4)
493 		return false;
494 
495 	switch (op->data.dir) {
496 	case SPI_MEM_DATA_IN:
497 		if (op->dummy.nbytes * BITS_PER_BYTE / op->dummy.buswidth > 0xf)
498 			return false;
499 
500 		/* quad in / quad out */
501 		if (op->addr.buswidth == 4)
502 			return op->data.buswidth == 4;
503 
504 		if (op->addr.buswidth == 2)
505 			return op->data.buswidth == 2;
506 
507 		/* standard spi */
508 		return op->data.buswidth == 4 || op->data.buswidth == 2 ||
509 		       op->data.buswidth == 1;
510 	case SPI_MEM_DATA_OUT:
511 		return !op->dummy.nbytes && op->addr.buswidth == 1 &&
512 		       (op->data.buswidth == 4 || op->data.buswidth == 1);
513 	default:
514 		return false;
515 	}
516 }
517 
518 static bool airoha_snand_supports_op(struct spi_mem *mem,
519 				     const struct spi_mem_op *op)
520 {
521 	if (!spi_mem_default_supports_op(mem, op))
522 		return false;
523 
524 	if (op->cmd.buswidth != 1)
525 		return false;
526 
527 	if (airoha_snand_is_page_ops(op))
528 		return true;
529 
530 	return (!op->addr.nbytes || op->addr.buswidth == 1) &&
531 	       (!op->dummy.nbytes || op->dummy.buswidth == 1) &&
532 	       (!op->data.nbytes || op->data.buswidth == 1);
533 }
534 
535 static int airoha_snand_dirmap_create(struct spi_mem_dirmap_desc *desc)
536 {
537 	u8 *txrx_buf = spi_get_ctldata(desc->mem->spi);
538 
539 	if (!txrx_buf)
540 		return -EINVAL;
541 
542 	if (desc->info.offset + desc->info.length > U32_MAX)
543 		return -EINVAL;
544 
545 	/* continuous reading is not supported */
546 	if (desc->info.length > SPI_NAND_CACHE_SIZE)
547 		return -E2BIG;
548 
549 	if (!airoha_snand_supports_op(desc->mem, &desc->info.op_tmpl))
550 		return -EOPNOTSUPP;
551 
552 	return 0;
553 }
554 
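/*
 * Dirmap read: the transfer is staged in the per-device bounce buffer
 * allocated in airoha_snand_setup(), rounded up to a 64-byte multiple,
 * and the requested window is copied out at the end. Only the
 * read-from-cache opcode is issued here; the page-read to cache is
 * presumably performed beforehand by the SPI-NAND core via exec_op.
 */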
555 static ssize_t airoha_snand_dirmap_read(struct spi_mem_dirmap_desc *desc,
556 					u64 offs, size_t len, void *buf)
557 {
558 	struct spi_device *spi = desc->mem->spi;
559 	struct airoha_snand_ctrl *as_ctrl;
560 	u8 *txrx_buf = spi_get_ctldata(spi);
561 	dma_addr_t dma_addr;
562 	u32 val, rd_mode, opcode;
563 	size_t bytes;
564 	int err;
565 
566 	as_ctrl = spi_controller_get_devdata(spi->controller);
567 
568 	/* minimum oob size is 64 */
569 	bytes = round_up(offs + len, 64);
570 
571 	/*
572 	 * DUALIO and QUADIO opcodes are not supported by the spi controller,
573 	 * replace them with supported opcodes.
574 	 */
575 	opcode = desc->info.op_tmpl.cmd.opcode;
576 	switch (opcode) {
577 	case SPI_NAND_OP_READ_FROM_CACHE_SINGLE:
578 	case SPI_NAND_OP_READ_FROM_CACHE_SINGLE_FAST:
579 		rd_mode = 0;
580 		break;
581 	case SPI_NAND_OP_READ_FROM_CACHE_DUAL:
582 	case SPI_NAND_OP_READ_FROM_CACHE_DUALIO:
583 		opcode = SPI_NAND_OP_READ_FROM_CACHE_DUAL;
584 		rd_mode = 1;
585 		break;
586 	case SPI_NAND_OP_READ_FROM_CACHE_QUAD:
587 	case SPI_NAND_OP_READ_FROM_CACHE_QUADIO:
588 		opcode = SPI_NAND_OP_READ_FROM_CACHE_QUAD;
589 		rd_mode = 2;
590 		break;
591 	default:
592 		/* unknown opcode */
593 		return -EOPNOTSUPP;
594 	}
595 
596 	err = airoha_snand_set_mode(as_ctrl, SPI_MODE_DMA);
597 	if (err < 0)
598 		return err;
599 
600 	/* NFI reset */
601 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
602 			   SPI_NFI_FIFO_FLUSH | SPI_NFI_RST);
603 	if (err)
604 		goto error_dma_mode_off;
605 
606 	/* NFI configure:
607 	 *   - No AutoFDM (custom sector size (SECCUS) register will be used)
608 	 *   - No SoC's hardware ECC (flash internal ECC will be used)
609 	 *   - Use burst mode (faster, but requires 16 byte alignment for addresses)
610 	 *   - Setup for reading (SPI_NFI_READ_MODE)
611 	 *   - Setup reading command: FIELD_PREP(SPI_NFI_OPMODE, 6)
612 	 *   - Use DMA instead of PIO for data reading
613 	 */
614 	err = regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CNFG,
615 				 SPI_NFI_DMA_MODE |
616 				 SPI_NFI_READ_MODE |
617 				 SPI_NFI_DMA_BURST_EN |
618 				 SPI_NFI_HW_ECC_EN |
619 				 SPI_NFI_AUTO_FDM_EN |
620 				 SPI_NFI_OPMODE,
621 				 SPI_NFI_DMA_MODE |
622 				 SPI_NFI_READ_MODE |
623 				 SPI_NFI_DMA_BURST_EN |
624 				 FIELD_PREP(SPI_NFI_OPMODE, 6));
625 	if (err)
626 		goto error_dma_mode_off;
627 
628 	/* Set the number of sectors to be read */
629 	err = regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
630 				 SPI_NFI_SEC_NUM,
631 				 FIELD_PREP(SPI_NFI_SEC_NUM, 1));
632 	if (err)
633 		goto error_dma_mode_off;
634 
635 	/* Set custom sector size */
636 	err = regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_SECCUS_SIZE,
637 				 SPI_NFI_CUS_SEC_SIZE |
638 				 SPI_NFI_CUS_SEC_SIZE_EN,
639 				 FIELD_PREP(SPI_NFI_CUS_SEC_SIZE, bytes) |
640 				 SPI_NFI_CUS_SEC_SIZE_EN);
641 	if (err)
642 		goto error_dma_mode_off;
643 
644 	dma_addr = dma_map_single(as_ctrl->dev, txrx_buf, SPI_NAND_CACHE_SIZE,
645 				  DMA_FROM_DEVICE);
646 	err = dma_mapping_error(as_ctrl->dev, dma_addr);
647 	if (err)
648 		goto error_dma_mode_off;
649 
650 	/* set dma addr */
651 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_STRADDR,
652 			   dma_addr);
653 	if (err)
654 		goto error_dma_unmap;
655 
656 	/*
657 	 * Setup transfer length
658 	 * ---------------------
659 	 * The following rule MUST be met:
660 	 *     transfer_length =
661 	 *        = NFI_SNF_MISC_CTL2.read_data_byte_number =
662 	 *        = NFI_CON.sector_number * NFI_SECCUS.custom_sector_size
663 	 */
664 	err = regmap_update_bits(as_ctrl->regmap_nfi,
665 				 REG_SPI_NFI_SNF_MISC_CTL2,
666 				 SPI_NFI_READ_DATA_BYTE_NUM,
667 				 FIELD_PREP(SPI_NFI_READ_DATA_BYTE_NUM, bytes));
668 	if (err)
669 		goto error_dma_unmap;
670 
671 	/* set read command */
672 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_RD_CTL2,
673 			   FIELD_PREP(SPI_NFI_DATA_READ_CMD, opcode));
674 	if (err)
675 		goto error_dma_unmap;
676 
677 	/* set read mode */
678 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_SNF_MISC_CTL,
679 			   FIELD_PREP(SPI_NFI_DATA_READ_WR_MODE, rd_mode));
680 	if (err)
681 		goto error_dma_unmap;
682 
683 	/* set read addr: zero page offset + descriptor read offset */
684 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_RD_CTL3,
685 			   desc->info.offset);
686 	if (err)
687 		goto error_dma_unmap;
688 
689 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_CMD, 0x0);
690 	if (err)
691 		goto error_dma_unmap;
692 
693 	/* trigger dma reading */
694 	err = regmap_clear_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
695 				SPI_NFI_RD_TRIG);
696 	if (err)
697 		goto error_dma_unmap;
698 
699 	err = regmap_set_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
700 			      SPI_NFI_RD_TRIG);
701 	if (err)
702 		goto error_dma_unmap;
703 
704 	err = regmap_read_poll_timeout(as_ctrl->regmap_nfi,
705 				       REG_SPI_NFI_SNF_STA_CTL1, val,
706 				       (val & SPI_NFI_READ_FROM_CACHE_DONE),
707 				       0, 1 * USEC_PER_SEC);
708 	if (err)
709 		goto error_dma_unmap;
710 
711 	/*
712 	 * SPI_NFI_READ_FROM_CACHE_DONE bit must be written at the end
713 	 * of dirmap_read operation even if it is already set.
714 	 */
715 	err = regmap_write_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_SNF_STA_CTL1,
716 				SPI_NFI_READ_FROM_CACHE_DONE,
717 				SPI_NFI_READ_FROM_CACHE_DONE);
718 	if (err)
719 		goto error_dma_unmap;
720 
721 	err = regmap_read_poll_timeout(as_ctrl->regmap_nfi, REG_SPI_NFI_INTR,
722 				       val, (val & SPI_NFI_AHB_DONE), 0,
723 				       1 * USEC_PER_SEC);
724 	if (err)
725 		goto error_dma_unmap;
726 
727 	/* The DMA read needs a short delay for the data to reach DRAM from the controller */
728 	udelay(1);
729 
730 	dma_unmap_single(as_ctrl->dev, dma_addr, SPI_NAND_CACHE_SIZE,
731 			 DMA_FROM_DEVICE);
732 	err = airoha_snand_set_mode(as_ctrl, SPI_MODE_MANUAL);
733 	if (err < 0)
734 		return err;
735 
736 	memcpy(buf, txrx_buf + offs, len);
737 
738 	return len;
739 
740 error_dma_unmap:
741 	dma_unmap_single(as_ctrl->dev, dma_addr, SPI_NAND_CACHE_SIZE,
742 			 DMA_FROM_DEVICE);
743 error_dma_mode_off:
744 	airoha_snand_set_mode(as_ctrl, SPI_MODE_MANUAL);
745 	return err;
746 }
747 
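/*
 * Dirmap write mirrors the read path: the payload is staged in the
 * bounce buffer and padded with 0xff on both sides, so that for a
 * freshly erased page the bytes outside the requested window should be
 * left in the erased state by the program-load operation.
 */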
748 static ssize_t airoha_snand_dirmap_write(struct spi_mem_dirmap_desc *desc,
749 					 u64 offs, size_t len, const void *buf)
750 {
751 	struct spi_device *spi = desc->mem->spi;
752 	u8 *txrx_buf = spi_get_ctldata(spi);
753 	struct airoha_snand_ctrl *as_ctrl;
754 	dma_addr_t dma_addr;
755 	u32 wr_mode, val, opcode;
756 	size_t bytes;
757 	int err;
758 
759 	as_ctrl = spi_controller_get_devdata(spi->controller);
760 
761 	/* minimum oob size is 64 */
762 	bytes = round_up(offs + len, 64);
763 
764 	opcode = desc->info.op_tmpl.cmd.opcode;
765 	switch (opcode) {
766 	case SPI_NAND_OP_PROGRAM_LOAD_SINGLE:
767 	case SPI_NAND_OP_PROGRAM_LOAD_RANDOM_SINGLE:
768 		wr_mode = 0;
769 		break;
770 	case SPI_NAND_OP_PROGRAM_LOAD_QUAD:
771 	case SPI_NAND_OP_PROGRAM_LOAD_RANDOM_QUAD:
772 		wr_mode = 2;
773 		break;
774 	default:
775 		/* unknown opcode */
776 		return -EOPNOTSUPP;
777 	}
778 
779 	if (offs > 0)
780 		memset(txrx_buf, 0xff, offs);
781 	memcpy(txrx_buf + offs, buf, len);
782 	if (bytes > offs + len)
783 		memset(txrx_buf + offs + len, 0xff, bytes - offs - len);
784 
785 	err = airoha_snand_set_mode(as_ctrl, SPI_MODE_DMA);
786 	if (err < 0)
787 		return err;
788 
789 	/* NFI reset */
790 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
791 			   SPI_NFI_FIFO_FLUSH | SPI_NFI_RST);
792 	if (err)
793 		goto error_dma_mode_off;
794 
795 	/*
796 	 * NFI configure:
797 	 *   - No AutoFDM (custom sector size (SECCUS) register will be used)
798 	 *   - No SoC's hardware ECC (flash internal ECC will be used)
799 	 *   - Use burst mode (faster, but requires 16 byte alignment for addresses)
800 	 *   - Setup for writing (SPI_NFI_READ_MODE bit is cleared)
801 	 *   - Setup writing command: FIELD_PREP(SPI_NFI_OPMODE, 3)
802 	 *   - Use DMA instead of PIO for data writing
803 	 */
804 	err = regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CNFG,
805 				 SPI_NFI_DMA_MODE |
806 				 SPI_NFI_READ_MODE |
807 				 SPI_NFI_DMA_BURST_EN |
808 				 SPI_NFI_HW_ECC_EN |
809 				 SPI_NFI_AUTO_FDM_EN |
810 				 SPI_NFI_OPMODE,
811 				 SPI_NFI_DMA_MODE |
812 				 SPI_NFI_DMA_BURST_EN |
813 				 FIELD_PREP(SPI_NFI_OPMODE, 3));
814 	if (err)
815 		goto error_dma_mode_off;
816 
817 	/* Set the number of sectors to be written */
818 	err = regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
819 				 SPI_NFI_SEC_NUM,
820 				 FIELD_PREP(SPI_NFI_SEC_NUM, 1));
821 	if (err)
822 		goto error_dma_mode_off;
823 
824 	/* Set custom sector size */
825 	err = regmap_update_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_SECCUS_SIZE,
826 				 SPI_NFI_CUS_SEC_SIZE |
827 				 SPI_NFI_CUS_SEC_SIZE_EN,
828 				 FIELD_PREP(SPI_NFI_CUS_SEC_SIZE, bytes) |
829 				 SPI_NFI_CUS_SEC_SIZE_EN);
830 	if (err)
831 		goto error_dma_mode_off;
832 
833 	dma_addr = dma_map_single(as_ctrl->dev, txrx_buf, SPI_NAND_CACHE_SIZE,
834 				  DMA_TO_DEVICE);
835 	err = dma_mapping_error(as_ctrl->dev, dma_addr);
836 	if (err)
837 		goto error_dma_mode_off;
838 
839 	/* set dma addr */
840 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_STRADDR,
841 			   dma_addr);
842 	if (err)
843 		goto error_dma_unmap;
844 
845 	/*
846 	 * Setup transfer length
847 	 * ---------------------
848 	 * The following rule MUST be met:
849 	 *     transfer_length =
850 	 *        = NFI_SNF_MISC_CTL2.write_data_byte_number =
851 	 *        = NFI_CON.sector_number * NFI_SECCUS.custom_sector_size
852 	 */
853 	err = regmap_update_bits(as_ctrl->regmap_nfi,
854 				 REG_SPI_NFI_SNF_MISC_CTL2,
855 				 SPI_NFI_PROG_LOAD_BYTE_NUM,
856 				 FIELD_PREP(SPI_NFI_PROG_LOAD_BYTE_NUM, bytes));
857 	if (err)
858 		goto error_dma_unmap;
859 
860 	/* set write command */
861 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_PG_CTL1,
862 			   FIELD_PREP(SPI_NFI_PG_LOAD_CMD, opcode));
863 	if (err)
864 		goto error_dma_unmap;
865 
866 	/* set write mode */
867 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_SNF_MISC_CTL,
868 			   FIELD_PREP(SPI_NFI_DATA_READ_WR_MODE, wr_mode));
869 	if (err)
870 		goto error_dma_unmap;
871 
872 	/* set write addr: zero page offset + descriptor write offset */
873 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_PG_CTL2,
874 			   desc->info.offset);
875 	if (err)
876 		goto error_dma_unmap;
877 
878 	err = regmap_write(as_ctrl->regmap_nfi, REG_SPI_NFI_CMD, 0x80);
879 	if (err)
880 		goto error_dma_unmap;
881 
882 	/* trigger dma writing */
883 	err = regmap_clear_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
884 				SPI_NFI_WR_TRIG);
885 	if (err)
886 		goto error_dma_unmap;
887 
888 	err = regmap_set_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_CON,
889 			      SPI_NFI_WR_TRIG);
890 	if (err)
891 		goto error_dma_unmap;
892 
893 	err = regmap_read_poll_timeout(as_ctrl->regmap_nfi, REG_SPI_NFI_INTR,
894 				       val, (val & SPI_NFI_AHB_DONE), 0,
895 				       1 * USEC_PER_SEC);
896 	if (err)
897 		goto error_dma_unmap;
898 
899 	err = regmap_read_poll_timeout(as_ctrl->regmap_nfi,
900 				       REG_SPI_NFI_SNF_STA_CTL1, val,
901 				       (val & SPI_NFI_LOAD_TO_CACHE_DONE),
902 				       0, 1 * USEC_PER_SEC);
903 	if (err)
904 		goto error_dma_unmap;
905 
906 	/*
907 	 * SPI_NFI_LOAD_TO_CACHE_DONE bit must be written at the end
908 	 * of dirmap_write operation even if it is already set.
909 	 */
910 	err = regmap_write_bits(as_ctrl->regmap_nfi, REG_SPI_NFI_SNF_STA_CTL1,
911 				SPI_NFI_LOAD_TO_CACHE_DONE,
912 				SPI_NFI_LOAD_TO_CACHE_DONE);
913 	if (err)
914 		goto error_dma_unmap;
915 
916 	dma_unmap_single(as_ctrl->dev, dma_addr, SPI_NAND_CACHE_SIZE,
917 			 DMA_TO_DEVICE);
918 	err = airoha_snand_set_mode(as_ctrl, SPI_MODE_MANUAL);
919 	if (err < 0)
920 		return err;
921 
922 	return len;
923 
924 error_dma_unmap:
925 	dma_unmap_single(as_ctrl->dev, dma_addr, SPI_NAND_CACHE_SIZE,
926 			 DMA_TO_DEVICE);
927 error_dma_mode_off:
928 	airoha_snand_set_mode(as_ctrl, SPI_MODE_MANUAL);
929 	return err;
930 }
931 
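/*
 * Generic exec_op path (PIO): command, address and dummy bytes are
 * serialized into a small buffer and clocked out through the TX FIFO,
 * the data phase then uses the TX or RX FIFO depending on direction,
 * and the chip select is toggled through dedicated op FIFO commands.
 */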
932 static int airoha_snand_exec_op(struct spi_mem *mem,
933 				const struct spi_mem_op *op)
934 {
935 	struct airoha_snand_ctrl *as_ctrl;
936 	int op_len, addr_len, dummy_len;
937 	u8 buf[20], *data;
938 	int i, err;
939 
940 	as_ctrl = spi_controller_get_devdata(mem->spi->controller);
941 
942 	op_len = op->cmd.nbytes;
943 	addr_len = op->addr.nbytes;
944 	dummy_len = op->dummy.nbytes;
945 
946 	if (op_len + dummy_len + addr_len > sizeof(buf))
947 		return -EIO;
948 
949 	data = buf;
950 	for (i = 0; i < op_len; i++)
951 		*data++ = op->cmd.opcode >> (8 * (op_len - i - 1));
952 	for (i = 0; i < addr_len; i++)
953 		*data++ = op->addr.val >> (8 * (addr_len - i - 1));
954 	for (i = 0; i < dummy_len; i++)
955 		*data++ = 0xff;
956 
957 	/* switch to manual mode */
958 	err = airoha_snand_set_mode(as_ctrl, SPI_MODE_MANUAL);
959 	if (err < 0)
960 		return err;
961 
962 	err = airoha_snand_set_cs(as_ctrl, SPI_CHIP_SEL_LOW);
963 	if (err < 0)
964 		return err;
965 
966 	/* opcode */
967 	data = buf;
968 	err = airoha_snand_write_data(as_ctrl, data, op_len,
969 				      op->cmd.buswidth);
970 	if (err)
971 		return err;
972 
973 	/* addr part */
974 	data += op_len;
975 	if (addr_len) {
976 		err = airoha_snand_write_data(as_ctrl, data, addr_len,
977 					      op->addr.buswidth);
978 		if (err)
979 			return err;
980 	}
981 
982 	/* dummy */
983 	data += addr_len;
984 	if (dummy_len) {
985 		err = airoha_snand_write_data(as_ctrl, data, dummy_len,
986 					      op->dummy.buswidth);
987 		if (err)
988 			return err;
989 	}
990 
991 	/* data */
992 	if (op->data.nbytes) {
993 		if (op->data.dir == SPI_MEM_DATA_IN)
994 			err = airoha_snand_read_data(as_ctrl, op->data.buf.in,
995 						     op->data.nbytes,
996 						     op->data.buswidth);
997 		else
998 			err = airoha_snand_write_data(as_ctrl, op->data.buf.out,
999 						      op->data.nbytes,
1000 						      op->data.buswidth);
1001 		if (err)
1002 			return err;
1003 	}
1004 
1005 	return airoha_snand_set_cs(as_ctrl, SPI_CHIP_SEL_HIGH);
1006 }
1007 
1008 static const struct spi_controller_mem_ops airoha_snand_mem_ops = {
1009 	.supports_op = airoha_snand_supports_op,
1010 	.exec_op = airoha_snand_exec_op,
1011 	.dirmap_create = airoha_snand_dirmap_create,
1012 	.dirmap_read = airoha_snand_dirmap_read,
1013 	.dirmap_write = airoha_snand_dirmap_write,
1014 };
1015 
1016 static const struct spi_controller_mem_ops airoha_snand_nodma_mem_ops = {
1017 	.supports_op = airoha_snand_supports_op,
1018 	.exec_op = airoha_snand_exec_op,
1019 };
1020 
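/* Allocate the per-device DMA bounce buffer used by the dirmap handlers. */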
1021 static int airoha_snand_setup(struct spi_device *spi)
1022 {
1023 	struct airoha_snand_ctrl *as_ctrl;
1024 	u8 *txrx_buf;
1025 
1026 	/* prepare device buffer */
1027 	as_ctrl = spi_controller_get_devdata(spi->controller);
1028 	txrx_buf = devm_kzalloc(as_ctrl->dev, SPI_NAND_CACHE_SIZE,
1029 				GFP_KERNEL);
1030 	if (!txrx_buf)
1031 		return -ENOMEM;
1032 
1033 	spi_set_ctldata(spi, txrx_buf);
1034 
1035 	return 0;
1036 }
1037 
1038 static const struct regmap_config spi_ctrl_regmap_config = {
1039 	.name		= "ctrl",
1040 	.reg_bits	= 32,
1041 	.val_bits	= 32,
1042 	.reg_stride	= 4,
1043 	.max_register	= REG_SPI_CTRL_NFI2SPI_EN,
1044 };
1045 
1046 static const struct regmap_config spi_nfi_regmap_config = {
1047 	.name		= "nfi",
1048 	.reg_bits	= 32,
1049 	.val_bits	= 32,
1050 	.reg_stride	= 4,
1051 	.max_register	= REG_SPI_NFI_SNF_NFI_CNFG,
1052 };
1053 
1054 static const struct of_device_id airoha_snand_ids[] = {
1055 	{ .compatible	= "airoha,en7581-snand" },
1056 	{ /* sentinel */ }
1057 };
1058 MODULE_DEVICE_TABLE(of, airoha_snand_ids);
1059 
1060 static int airoha_snand_probe(struct platform_device *pdev)
1061 {
1062 	struct airoha_snand_ctrl *as_ctrl;
1063 	struct device *dev = &pdev->dev;
1064 	struct spi_controller *ctrl;
1065 	bool dma_enable = true;
1066 	void __iomem *base;
1067 	u32 sfc_strap;
1068 	int err;
1069 
1070 	ctrl = devm_spi_alloc_host(dev, sizeof(*as_ctrl));
1071 	if (!ctrl)
1072 		return -ENOMEM;
1073 
1074 	as_ctrl = spi_controller_get_devdata(ctrl);
1075 	as_ctrl->dev = dev;
1076 
1077 	base = devm_platform_ioremap_resource(pdev, 0);
1078 	if (IS_ERR(base))
1079 		return PTR_ERR(base);
1080 
1081 	as_ctrl->regmap_ctrl = devm_regmap_init_mmio(dev, base,
1082 						     &spi_ctrl_regmap_config);
1083 	if (IS_ERR(as_ctrl->regmap_ctrl))
1084 		return dev_err_probe(dev, PTR_ERR(as_ctrl->regmap_ctrl),
1085 				     "failed to init spi ctrl regmap\n");
1086 
1087 	base = devm_platform_ioremap_resource(pdev, 1);
1088 	if (IS_ERR(base))
1089 		return PTR_ERR(base);
1090 
1091 	as_ctrl->regmap_nfi = devm_regmap_init_mmio(dev, base,
1092 						    &spi_nfi_regmap_config);
1093 	if (IS_ERR(as_ctrl->regmap_nfi))
1094 		return dev_err_probe(dev, PTR_ERR(as_ctrl->regmap_nfi),
1095 				     "failed to init spi nfi regmap\n");
1096 
1097 	as_ctrl->spi_clk = devm_clk_get_enabled(dev, "spi");
1098 	if (IS_ERR(as_ctrl->spi_clk))
1099 		return dev_err_probe(dev, PTR_ERR(as_ctrl->spi_clk),
1100 				     "unable to get spi clk\n");
1101 
1102 	if (device_is_compatible(dev, "airoha,en7523-snand")) {
1103 		err = regmap_read(as_ctrl->regmap_ctrl,
1104 				  REG_SPI_CTRL_SFC_STRAP, &sfc_strap);
1105 		if (err)
1106 			return err;
1107 
1108 		if (!(sfc_strap & 0x04)) {
1109 			dma_enable = false;
1110 			dev_warn(dev, "Detected booting in RESERVED mode (UART_TXD was shorted to GND).\n");
1111 			dev_warn(dev, "This mode is known to cause incorrect DMA reads on some flashes.\n");
1112 			dev_warn(dev, "The much slower PIO mode will be used to prevent flash data corruption.\n");
1113 			dev_warn(dev, "Unplug the UART cable and power cycle the board to restore full performance.\n");
1114 		}
1115 	}
1116 
1117 	err = dma_set_mask(as_ctrl->dev, DMA_BIT_MASK(32));
1118 	if (err)
1119 		return err;
1120 
1121 	ctrl->num_chipselect = 2;
1122 	ctrl->mem_ops = dma_enable ? &airoha_snand_mem_ops
1123 				   : &airoha_snand_nodma_mem_ops;
1124 	ctrl->bits_per_word_mask = SPI_BPW_MASK(8);
1125 	ctrl->mode_bits = SPI_RX_DUAL;
1126 	ctrl->setup = airoha_snand_setup;
1127 	device_set_node(&ctrl->dev, dev_fwnode(dev));
1128 
1129 	err = airoha_snand_nfi_init(as_ctrl);
1130 	if (err)
1131 		return err;
1132 
1133 	return devm_spi_register_controller(dev, ctrl);
1134 }
1135 
1136 static struct platform_driver airoha_snand_driver = {
1137 	.driver = {
1138 		.name = "airoha-spi",
1139 		.of_match_table = airoha_snand_ids,
1140 	},
1141 	.probe = airoha_snand_probe,
1142 };
1143 module_platform_driver(airoha_snand_driver);
1144 
1145 MODULE_DESCRIPTION("Airoha SPI-NAND Flash Controller Driver");
1146 MODULE_AUTHOR("Lorenzo Bianconi <lorenzo@kernel.org>");
1147 MODULE_AUTHOR("Ray Liu <ray.liu@airoha.com>");
1148 MODULE_LICENSE("GPL");
1149