xref: /linux/drivers/spi/spi-tegra20-sflash.c (revision 06d07429858317ded2db7986113a9e0129cd599b)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * SPI driver for Nvidia's Tegra20 Serial Flash Controller.
4  *
5  * Copyright (c) 2012, NVIDIA CORPORATION.  All rights reserved.
6  *
7  * Author: Laxman Dewangan <ldewangan@nvidia.com>
8  */
9 
10 #include <linux/clk.h>
11 #include <linux/completion.h>
12 #include <linux/delay.h>
13 #include <linux/err.h>
14 #include <linux/interrupt.h>
15 #include <linux/io.h>
16 #include <linux/kernel.h>
17 #include <linux/kthread.h>
18 #include <linux/module.h>
19 #include <linux/platform_device.h>
20 #include <linux/pm_runtime.h>
21 #include <linux/of.h>
22 #include <linux/of_device.h>
23 #include <linux/reset.h>
24 #include <linux/spi/spi.h>
25 
26 #define SPI_COMMAND				0x000
27 #define SPI_GO					BIT(30)
28 #define SPI_M_S					BIT(28)
29 #define SPI_ACTIVE_SCLK_MASK			(0x3 << 26)
30 #define SPI_ACTIVE_SCLK_DRIVE_LOW		(0 << 26)
31 #define SPI_ACTIVE_SCLK_DRIVE_HIGH		(1 << 26)
32 #define SPI_ACTIVE_SCLK_PULL_LOW		(2 << 26)
33 #define SPI_ACTIVE_SCLK_PULL_HIGH		(3 << 26)
34 
35 #define SPI_CK_SDA_FALLING			(1 << 21)
36 #define SPI_CK_SDA_RISING			(0 << 21)
37 #define SPI_CK_SDA_MASK				(1 << 21)
38 #define SPI_ACTIVE_SDA				(0x3 << 18)
39 #define SPI_ACTIVE_SDA_DRIVE_LOW		(0 << 18)
40 #define SPI_ACTIVE_SDA_DRIVE_HIGH		(1 << 18)
41 #define SPI_ACTIVE_SDA_PULL_LOW			(2 << 18)
42 #define SPI_ACTIVE_SDA_PULL_HIGH		(3 << 18)
43 
44 #define SPI_CS_POL_INVERT			BIT(16)
45 #define SPI_TX_EN				BIT(15)
46 #define SPI_RX_EN				BIT(14)
47 #define SPI_CS_VAL_HIGH				BIT(13)
48 #define SPI_CS_VAL_LOW				0x0
49 #define SPI_CS_SW				BIT(12)
50 #define SPI_CS_HW				0x0
51 #define SPI_CS_DELAY_MASK			(7 << 9)
52 #define SPI_CS3_EN				BIT(8)
53 #define SPI_CS2_EN				BIT(7)
54 #define SPI_CS1_EN				BIT(6)
55 #define SPI_CS0_EN				BIT(5)
56 
57 #define SPI_CS_MASK			(SPI_CS3_EN | SPI_CS2_EN |	\
58 					SPI_CS1_EN | SPI_CS0_EN)
59 #define SPI_BIT_LENGTH(x)		(((x) & 0x1f) << 0)
60 
61 #define SPI_MODES			(SPI_ACTIVE_SCLK_MASK | SPI_CK_SDA_MASK)
62 
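/*
 * Illustrative example, not used by the driver itself: for an 8-bit,
 * SPI mode 3 (CPOL=1, CPHA=1), transmit-only transfer on chip-select 0
 * with software chip-select, tegra_sflash_start_transfer_one() below
 * ends up composing a command word along the lines of
 *
 *   SPI_M_S | SPI_CS_SW | SPI_CS_VAL_HIGH | SPI_ACTIVE_SCLK_DRIVE_HIGH |
 *   SPI_CK_SDA_FALLING | SPI_CS0_EN | SPI_BIT_LENGTH(8 - 1) | SPI_TX_EN
 */
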
63 #define SPI_STATUS			0x004
64 #define SPI_BSY				BIT(31)
65 #define SPI_RDY				BIT(30)
66 #define SPI_TXF_FLUSH			BIT(29)
67 #define SPI_RXF_FLUSH			BIT(28)
68 #define SPI_RX_UNF			BIT(27)
69 #define SPI_TX_OVF			BIT(26)
70 #define SPI_RXF_EMPTY			BIT(25)
71 #define SPI_RXF_FULL			BIT(24)
72 #define SPI_TXF_EMPTY			BIT(23)
73 #define SPI_TXF_FULL			BIT(22)
74 #define SPI_BLK_CNT(count)		(((count) & 0xffff) + 1)
75 
76 #define SPI_FIFO_ERROR			(SPI_RX_UNF | SPI_TX_OVF)
77 #define SPI_FIFO_EMPTY			(SPI_TXF_EMPTY | SPI_RXF_EMPTY)
78 
79 #define SPI_RX_CMP			0x8
80 #define SPI_DMA_CTL			0x0C
81 #define SPI_DMA_EN			BIT(31)
82 #define SPI_IE_RXC			BIT(27)
83 #define SPI_IE_TXC			BIT(26)
84 #define SPI_PACKED			BIT(20)
85 #define SPI_RX_TRIG_MASK		(0x3 << 18)
86 #define SPI_RX_TRIG_1W			(0x0 << 18)
87 #define SPI_RX_TRIG_4W			(0x1 << 18)
88 #define SPI_TX_TRIG_MASK		(0x3 << 16)
89 #define SPI_TX_TRIG_1W			(0x0 << 16)
90 #define SPI_TX_TRIG_4W			(0x1 << 16)
91 #define SPI_DMA_BLK_COUNT(count)	(((count) - 1) & 0xFFFF)
92 
93 #define SPI_TX_FIFO			0x10
94 #define SPI_RX_FIFO			0x20
95 
96 #define DATA_DIR_TX			(1 << 0)
97 #define DATA_DIR_RX			(1 << 1)
98 
99 #define MAX_CHIP_SELECT			4
100 #define SPI_FIFO_DEPTH			4
101 #define SPI_DMA_TIMEOUT			(msecs_to_jiffies(1000))
102 
103 struct tegra_sflash_data {
104 	struct device				*dev;
105 	struct spi_controller			*host;
106 	spinlock_t				lock;
107 
108 	struct clk				*clk;
109 	struct reset_control			*rst;
110 	void __iomem				*base;
111 	unsigned				irq;
112 	u32					cur_speed;
113 
114 	struct spi_device			*cur_spi;
115 	unsigned				cur_pos;
116 	unsigned				cur_len;
117 	unsigned				bytes_per_word;
118 	unsigned				cur_direction;
119 	unsigned				curr_xfer_words;
120 
121 	unsigned				cur_rx_pos;
122 	unsigned				cur_tx_pos;
123 
124 	u32					tx_status;
125 	u32					rx_status;
126 	u32					status_reg;
127 
128 	u32					def_command_reg;
129 	u32					command_reg;
130 	u32					dma_control_reg;
131 
132 	struct completion			xfer_completion;
133 	struct spi_transfer			*curr_xfer;
134 };
135 
136 static int tegra_sflash_runtime_suspend(struct device *dev);
137 static int tegra_sflash_runtime_resume(struct device *dev);
138 
139 static inline u32 tegra_sflash_readl(struct tegra_sflash_data *tsd,
140 		unsigned long reg)
141 {
142 	return readl(tsd->base + reg);
143 }
144 
145 static inline void tegra_sflash_writel(struct tegra_sflash_data *tsd,
146 		u32 val, unsigned long reg)
147 {
148 	writel(val, tsd->base + reg);
149 }
150 
151 static void tegra_sflash_clear_status(struct tegra_sflash_data *tsd)
152 {
153 	/* Write 1 to clear status register */
154 	tegra_sflash_writel(tsd, SPI_RDY | SPI_FIFO_ERROR, SPI_STATUS);
155 }
156 
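/*
 * Work out how many FIFO words the next programmed chunk covers: the
 * bytes remaining in the transfer, expressed in bits_per_word-sized
 * words and capped at the 4-word FIFO depth.
 */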
157 static unsigned tegra_sflash_calculate_curr_xfer_param(
158 	struct spi_device *spi, struct tegra_sflash_data *tsd,
159 	struct spi_transfer *t)
160 {
161 	unsigned remain_len = t->len - tsd->cur_pos;
162 	unsigned max_word;
163 
164 	tsd->bytes_per_word = DIV_ROUND_UP(t->bits_per_word, 8);
165 	max_word = remain_len / tsd->bytes_per_word;
166 	if (max_word > SPI_FIFO_DEPTH)
167 		max_word = SPI_FIFO_DEPTH;
168 	tsd->curr_xfer_words = max_word;
169 	return max_word;
170 }
171 
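/*
 * Pack bytes from the client tx buffer into 32-bit TX FIFO words
 * (least-significant byte first within each word) until the current
 * chunk has been written or the TX FIFO reports full.  Returns the
 * number of FIFO words that make up this chunk.
 */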
172 static unsigned tegra_sflash_fill_tx_fifo_from_client_txbuf(
173 	struct tegra_sflash_data *tsd, struct spi_transfer *t)
174 {
175 	unsigned nbytes;
176 	u32 status;
177 	unsigned max_n_32bit = tsd->curr_xfer_words;
178 	u8 *tx_buf = (u8 *)t->tx_buf + tsd->cur_tx_pos;
179 
180 	if (max_n_32bit > SPI_FIFO_DEPTH)
181 		max_n_32bit = SPI_FIFO_DEPTH;
182 	nbytes = max_n_32bit * tsd->bytes_per_word;
183 
184 	status = tegra_sflash_readl(tsd, SPI_STATUS);
185 	while (!(status & SPI_TXF_FULL)) {
186 		int i;
187 		u32 x = 0;
188 
189 		for (i = 0; nbytes && (i < tsd->bytes_per_word);
190 							i++, nbytes--)
191 			x |= (u32)(*tx_buf++) << (i * 8);
192 		tegra_sflash_writel(tsd, x, SPI_TX_FIFO);
193 		if (!nbytes)
194 			break;
195 
196 		status = tegra_sflash_readl(tsd, SPI_STATUS);
197 	}
198 	tsd->cur_tx_pos += max_n_32bit * tsd->bytes_per_word;
199 	return max_n_32bit;
200 }
201 
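/*
 * Drain the RX FIFO into the client rx buffer, unpacking each 32-bit
 * word into bytes_per_word bytes (least-significant byte first), until
 * the FIFO reports empty.
 */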
202 static int tegra_sflash_read_rx_fifo_to_client_rxbuf(
203 		struct tegra_sflash_data *tsd, struct spi_transfer *t)
204 {
205 	u32 status;
206 	unsigned int read_words = 0;
207 	u8 *rx_buf = (u8 *)t->rx_buf + tsd->cur_rx_pos;
208 
209 	status = tegra_sflash_readl(tsd, SPI_STATUS);
210 	while (!(status & SPI_RXF_EMPTY)) {
211 		int i;
212 		u32 x = tegra_sflash_readl(tsd, SPI_RX_FIFO);
213 
214 		for (i = 0; (i < tsd->bytes_per_word); i++)
215 			*rx_buf++ = (x >> (i*8)) & 0xFF;
216 		read_words++;
217 		status = tegra_sflash_readl(tsd, SPI_STATUS);
218 	}
219 	tsd->cur_rx_pos += read_words * tsd->bytes_per_word;
220 	return 0;
221 }
222 
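/*
 * Start an interrupt-driven PIO chunk: enable the TX/RX completion
 * interrupts in SPI_DMA_CTL, pre-fill the TX FIFO when transmitting,
 * program the block count and finally set SPI_DMA_EN to kick off the
 * transfer.
 */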
223 static int tegra_sflash_start_cpu_based_transfer(
224 		struct tegra_sflash_data *tsd, struct spi_transfer *t)
225 {
226 	u32 val = 0;
227 	unsigned cur_words;
228 
229 	if (tsd->cur_direction & DATA_DIR_TX)
230 		val |= SPI_IE_TXC;
231 
232 	if (tsd->cur_direction & DATA_DIR_RX)
233 		val |= SPI_IE_RXC;
234 
235 	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
236 	tsd->dma_control_reg = val;
237 
238 	if (tsd->cur_direction & DATA_DIR_TX)
239 		cur_words = tegra_sflash_fill_tx_fifo_from_client_txbuf(tsd, t);
240 	else
241 		cur_words = tsd->curr_xfer_words;
242 	val |= SPI_DMA_BLK_COUNT(cur_words);
243 	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
244 	tsd->dma_control_reg = val;
245 	val |= SPI_DMA_EN;
246 	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
247 	return 0;
248 }
249 
250 static int tegra_sflash_start_transfer_one(struct spi_device *spi,
251 		struct spi_transfer *t, bool is_first_of_msg,
252 		bool is_single_xfer)
253 {
254 	struct tegra_sflash_data *tsd = spi_controller_get_devdata(spi->controller);
255 	u32 speed;
256 	u32 command;
257 
258 	speed = t->speed_hz;
259 	if (speed != tsd->cur_speed) {
260 		clk_set_rate(tsd->clk, speed);
261 		tsd->cur_speed = speed;
262 	}
263 
264 	tsd->cur_spi = spi;
265 	tsd->cur_pos = 0;
266 	tsd->cur_rx_pos = 0;
267 	tsd->cur_tx_pos = 0;
268 	tsd->curr_xfer = t;
269 	tegra_sflash_calculate_curr_xfer_param(spi, tsd, t);
270 	if (is_first_of_msg) {
271 		command = tsd->def_command_reg;
272 		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
273 		command |= SPI_CS_VAL_HIGH;
274 
275 		command &= ~SPI_MODES;
276 		if (spi->mode & SPI_CPHA)
277 			command |= SPI_CK_SDA_FALLING;
278 
279 		if (spi->mode & SPI_CPOL)
280 			command |= SPI_ACTIVE_SCLK_DRIVE_HIGH;
281 		else
282 			command |= SPI_ACTIVE_SCLK_DRIVE_LOW;
283 		command |= SPI_CS0_EN << spi_get_chipselect(spi, 0);
284 	} else {
285 		command = tsd->command_reg;
286 		command &= ~SPI_BIT_LENGTH(~0);
287 		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
288 		command &= ~(SPI_RX_EN | SPI_TX_EN);
289 	}
290 
291 	tsd->cur_direction = 0;
292 	if (t->rx_buf) {
293 		command |= SPI_RX_EN;
294 		tsd->cur_direction |= DATA_DIR_RX;
295 	}
296 	if (t->tx_buf) {
297 		command |= SPI_TX_EN;
298 		tsd->cur_direction |= DATA_DIR_TX;
299 	}
300 	tegra_sflash_writel(tsd, command, SPI_COMMAND);
301 	tsd->command_reg = command;
302 
303 	return tegra_sflash_start_cpu_based_transfer(tsd, t);
304 }
305 
306 static int tegra_sflash_transfer_one_message(struct spi_controller *host,
307 			struct spi_message *msg)
308 {
309 	bool is_first_msg = true;
310 	int single_xfer;
311 	struct tegra_sflash_data *tsd = spi_controller_get_devdata(host);
312 	struct spi_transfer *xfer;
313 	struct spi_device *spi = msg->spi;
314 	int ret;
315 
316 	msg->status = 0;
317 	msg->actual_length = 0;
318 	single_xfer = list_is_singular(&msg->transfers);
319 	list_for_each_entry(xfer, &msg->transfers, transfer_list) {
320 		reinit_completion(&tsd->xfer_completion);
321 		ret = tegra_sflash_start_transfer_one(spi, xfer,
322 					is_first_msg, single_xfer);
323 		if (ret < 0) {
324 			dev_err(tsd->dev,
325 				"spi cannot start transfer, err %d\n", ret);
326 			goto exit;
327 		}
328 		is_first_msg = false;
329 		ret = wait_for_completion_timeout(&tsd->xfer_completion,
330 						SPI_DMA_TIMEOUT);
331 		if (WARN_ON(ret == 0)) {
332 			dev_err(tsd->dev,
333 				"spi transfer timeout\n");
334 			ret = -EIO;
335 			goto exit;
336 		}
337 
338 		if (tsd->tx_status || tsd->rx_status) {
339 			dev_err(tsd->dev, "Error in Transfer\n");
340 			ret = -EIO;
341 			goto exit;
342 		}
343 		msg->actual_length += xfer->len;
344 		if (xfer->cs_change && xfer->delay.value) {
345 			tegra_sflash_writel(tsd, tsd->def_command_reg,
346 					SPI_COMMAND);
347 			spi_transfer_delay_exec(xfer);
348 		}
349 	}
350 	ret = 0;
351 exit:
352 	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
353 	msg->status = ret;
354 	spi_finalize_current_message(host);
355 	return ret;
356 }
357 
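/*
 * Per-chunk interrupt handling: on a FIFO error (or if the controller
 * is unexpectedly still busy) reset the controller and complete the
 * transfer so the error is reported; otherwise drain the RX FIFO and
 * either complete the transfer or program the next chunk.
 */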
358 static irqreturn_t handle_cpu_based_xfer(struct tegra_sflash_data *tsd)
359 {
360 	struct spi_transfer *t = tsd->curr_xfer;
361 
362 	spin_lock(&tsd->lock);
363 	if (tsd->tx_status || tsd->rx_status || (tsd->status_reg & SPI_BSY)) {
364 		dev_err(tsd->dev,
365 			"CpuXfer ERROR bit set 0x%x\n", tsd->status_reg);
366 		dev_err(tsd->dev,
367 			"CpuXfer 0x%08x:0x%08x\n", tsd->command_reg,
368 				tsd->dma_control_reg);
369 		reset_control_assert(tsd->rst);
370 		udelay(2);
371 		reset_control_deassert(tsd->rst);
372 		complete(&tsd->xfer_completion);
373 		goto exit;
374 	}
375 
376 	if (tsd->cur_direction & DATA_DIR_RX)
377 		tegra_sflash_read_rx_fifo_to_client_rxbuf(tsd, t);
378 
379 	if (tsd->cur_direction & DATA_DIR_TX)
380 		tsd->cur_pos = tsd->cur_tx_pos;
381 	else
382 		tsd->cur_pos = tsd->cur_rx_pos;
383 
384 	if (tsd->cur_pos == t->len) {
385 		complete(&tsd->xfer_completion);
386 		goto exit;
387 	}
388 
389 	tegra_sflash_calculate_curr_xfer_param(tsd->cur_spi, tsd, t);
390 	tegra_sflash_start_cpu_based_transfer(tsd, t);
391 exit:
392 	spin_unlock(&tsd->lock);
393 	return IRQ_HANDLED;
394 }
395 
396 static irqreturn_t tegra_sflash_isr(int irq, void *context_data)
397 {
398 	struct tegra_sflash_data *tsd = context_data;
399 
400 	tsd->status_reg = tegra_sflash_readl(tsd, SPI_STATUS);
401 	if (tsd->cur_direction & DATA_DIR_TX)
402 		tsd->tx_status = tsd->status_reg & SPI_TX_OVF;
403 
404 	if (tsd->cur_direction & DATA_DIR_RX)
405 		tsd->rx_status = tsd->status_reg & SPI_RX_UNF;
406 	tegra_sflash_clear_status(tsd);
407 
408 	return handle_cpu_based_xfer(tsd);
409 }
410 
411 static const struct of_device_id tegra_sflash_of_match[] = {
412 	{ .compatible = "nvidia,tegra20-sflash", },
413 	{}
414 };
415 MODULE_DEVICE_TABLE(of, tegra_sflash_of_match);
416 
417 static int tegra_sflash_probe(struct platform_device *pdev)
418 {
419 	struct spi_controller	*host;
420 	struct tegra_sflash_data	*tsd;
421 	int ret;
422 	const struct of_device_id *match;
423 
424 	match = of_match_device(tegra_sflash_of_match, &pdev->dev);
425 	if (!match) {
426 		dev_err(&pdev->dev, "Error: No device match found\n");
427 		return -ENODEV;
428 	}
429 
430 	host = spi_alloc_host(&pdev->dev, sizeof(*tsd));
431 	if (!host) {
432 		dev_err(&pdev->dev, "host allocation failed\n");
433 		return -ENOMEM;
434 	}
435 
436 	/* the spi->mode bits understood by this driver: */
437 	host->mode_bits = SPI_CPOL | SPI_CPHA;
438 	host->transfer_one_message = tegra_sflash_transfer_one_message;
439 	host->auto_runtime_pm = true;
440 	host->num_chipselect = MAX_CHIP_SELECT;
441 
442 	platform_set_drvdata(pdev, host);
443 	tsd = spi_controller_get_devdata(host);
444 	tsd->host = host;
445 	tsd->dev = &pdev->dev;
446 	spin_lock_init(&tsd->lock);
447 
448 	if (of_property_read_u32(tsd->dev->of_node, "spi-max-frequency",
449 				 &host->max_speed_hz))
450 		host->max_speed_hz = 25000000; /* 25MHz */
451 
452 	tsd->base = devm_platform_ioremap_resource(pdev, 0);
453 	if (IS_ERR(tsd->base)) {
454 		ret = PTR_ERR(tsd->base);
455 		goto exit_free_host;
456 	}
457 
458 	ret = platform_get_irq(pdev, 0);
459 	if (ret < 0)
460 		goto exit_free_host;
461 	tsd->irq = ret;
462 
463 	ret = request_irq(tsd->irq, tegra_sflash_isr, 0,
464 			dev_name(&pdev->dev), tsd);
465 	if (ret < 0) {
466 		dev_err(&pdev->dev, "Failed to register ISR for IRQ %d\n",
467 					tsd->irq);
468 		goto exit_free_host;
469 	}
470 
471 	tsd->clk = devm_clk_get(&pdev->dev, NULL);
472 	if (IS_ERR(tsd->clk)) {
473 		dev_err(&pdev->dev, "cannot get clock\n");
474 		ret = PTR_ERR(tsd->clk);
475 		goto exit_free_irq;
476 	}
477 
478 	tsd->rst = devm_reset_control_get_exclusive(&pdev->dev, "spi");
479 	if (IS_ERR(tsd->rst)) {
480 		dev_err(&pdev->dev, "cannot get reset\n");
481 		ret = PTR_ERR(tsd->rst);
482 		goto exit_free_irq;
483 	}
484 
485 	init_completion(&tsd->xfer_completion);
486 	pm_runtime_enable(&pdev->dev);
487 	if (!pm_runtime_enabled(&pdev->dev)) {
488 		ret = tegra_sflash_runtime_resume(&pdev->dev);
489 		if (ret)
490 			goto exit_pm_disable;
491 	}
492 
493 	ret = pm_runtime_resume_and_get(&pdev->dev);
494 	if (ret < 0) {
495 		dev_err(&pdev->dev, "pm runtime get failed, e = %d\n", ret);
496 		goto exit_pm_disable;
497 	}
498 
499 	/* Reset controller */
500 	reset_control_assert(tsd->rst);
501 	udelay(2);
502 	reset_control_deassert(tsd->rst);
503 
504 	tsd->def_command_reg  = SPI_M_S | SPI_CS_SW;
505 	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
506 	pm_runtime_put(&pdev->dev);
507 
508 	host->dev.of_node = pdev->dev.of_node;
509 	ret = devm_spi_register_controller(&pdev->dev, host);
510 	if (ret < 0) {
511 		dev_err(&pdev->dev, "cannot register host, err %d\n", ret);
512 		goto exit_pm_disable;
513 	}
514 	return ret;
515 
516 exit_pm_disable:
517 	pm_runtime_disable(&pdev->dev);
518 	if (!pm_runtime_status_suspended(&pdev->dev))
519 		tegra_sflash_runtime_suspend(&pdev->dev);
520 exit_free_irq:
521 	free_irq(tsd->irq, tsd);
522 exit_free_host:
523 	spi_controller_put(host);
524 	return ret;
525 }
526 
527 static void tegra_sflash_remove(struct platform_device *pdev)
528 {
529 	struct spi_controller *host = platform_get_drvdata(pdev);
530 	struct tegra_sflash_data	*tsd = spi_controller_get_devdata(host);
531 
532 	free_irq(tsd->irq, tsd);
533 
534 	pm_runtime_disable(&pdev->dev);
535 	if (!pm_runtime_status_suspended(&pdev->dev))
536 		tegra_sflash_runtime_suspend(&pdev->dev);
537 }
538 
539 #ifdef CONFIG_PM_SLEEP
540 static int tegra_sflash_suspend(struct device *dev)
541 {
542 	struct spi_controller *host = dev_get_drvdata(dev);
543 
544 	return spi_controller_suspend(host);
545 }
546 
547 static int tegra_sflash_resume(struct device *dev)
548 {
549 	struct spi_controller *host = dev_get_drvdata(dev);
550 	struct tegra_sflash_data *tsd = spi_controller_get_devdata(host);
551 	int ret;
552 
553 	ret = pm_runtime_resume_and_get(dev);
554 	if (ret < 0) {
555 		dev_err(dev, "pm runtime failed, e = %d\n", ret);
556 		return ret;
557 	}
558 	tegra_sflash_writel(tsd, tsd->command_reg, SPI_COMMAND);
559 	pm_runtime_put(dev);
560 
561 	return spi_controller_resume(host);
562 }
563 #endif
564 
565 static int tegra_sflash_runtime_suspend(struct device *dev)
566 {
567 	struct spi_controller *host = dev_get_drvdata(dev);
568 	struct tegra_sflash_data *tsd = spi_controller_get_devdata(host);
569 
570 	/* Flush all writes that are in the PPSB queue by reading back */
571 	tegra_sflash_readl(tsd, SPI_COMMAND);
572 
573 	clk_disable_unprepare(tsd->clk);
574 	return 0;
575 }
576 
577 static int tegra_sflash_runtime_resume(struct device *dev)
578 {
579 	struct spi_controller *host = dev_get_drvdata(dev);
580 	struct tegra_sflash_data *tsd = spi_controller_get_devdata(host);
581 	int ret;
582 
583 	ret = clk_prepare_enable(tsd->clk);
584 	if (ret < 0) {
585 		dev_err(tsd->dev, "clk_prepare failed: %d\n", ret);
586 		return ret;
587 	}
588 	return 0;
589 }
590 
591 static const struct dev_pm_ops slink_pm_ops = {
592 	SET_RUNTIME_PM_OPS(tegra_sflash_runtime_suspend,
593 		tegra_sflash_runtime_resume, NULL)
594 	SET_SYSTEM_SLEEP_PM_OPS(tegra_sflash_suspend, tegra_sflash_resume)
595 };
596 static struct platform_driver tegra_sflash_driver = {
597 	.driver = {
598 		.name		= "spi-tegra-sflash",
599 		.pm		= &slink_pm_ops,
600 		.of_match_table	= tegra_sflash_of_match,
601 	},
602 	.probe =	tegra_sflash_probe,
603 	.remove_new =	tegra_sflash_remove,
604 };
605 module_platform_driver(tegra_sflash_driver);
606 
607 MODULE_ALIAS("platform:spi-tegra-sflash");
608 MODULE_DESCRIPTION("NVIDIA Tegra20 Serial Flash Controller Driver");
609 MODULE_AUTHOR("Laxman Dewangan <ldewangan@nvidia.com>");
610 MODULE_LICENSE("GPL v2");
611