1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * This is the i.MX low power i2c controller driver.
4  *
5  * Copyright 2016 Freescale Semiconductor, Inc.
6  */
7 
8 #include <linux/clk.h>
9 #include <linux/completion.h>
10 #include <linux/delay.h>
11 #include <linux/dma-mapping.h>
12 #include <linux/dmaengine.h>
13 #include <linux/err.h>
14 #include <linux/errno.h>
15 #include <linux/i2c.h>
16 #include <linux/init.h>
17 #include <linux/interrupt.h>
18 #include <linux/io.h>
19 #include <linux/iopoll.h>
20 #include <linux/kernel.h>
21 #include <linux/module.h>
22 #include <linux/of.h>
23 #include <linux/pinctrl/consumer.h>
24 #include <linux/platform_device.h>
25 #include <linux/pm_runtime.h>
26 #include <linux/sched.h>
27 #include <linux/slab.h>
28 
29 #define DRIVER_NAME "imx-lpi2c"
30 
31 #define LPI2C_PARAM	0x04	/* i2c RX/TX FIFO size */
32 #define LPI2C_MCR	0x10	/* i2c control register */
33 #define LPI2C_MSR	0x14	/* i2c status register */
34 #define LPI2C_MIER	0x18	/* i2c interrupt enable */
35 #define LPI2C_MDER	0x1C	/* i2c DMA enable */
36 #define LPI2C_MCFGR0	0x20	/* i2c master configuration */
37 #define LPI2C_MCFGR1	0x24	/* i2c master configuration */
38 #define LPI2C_MCFGR2	0x28	/* i2c master configuration */
39 #define LPI2C_MCFGR3	0x2C	/* i2c master configuration */
40 #define LPI2C_MCCR0	0x48	/* i2c master clk configuration */
41 #define LPI2C_MCCR1	0x50	/* i2c master clk configuration */
42 #define LPI2C_MFCR	0x58	/* i2c master FIFO control */
43 #define LPI2C_MFSR	0x5C	/* i2c master FIFO status */
44 #define LPI2C_MTDR	0x60	/* i2c master TX data register */
45 #define LPI2C_MRDR	0x70	/* i2c master RX data register */
46 
47 #define LPI2C_SCR	0x110	/* i2c target control register */
48 #define LPI2C_SSR	0x114	/* i2c target status register */
49 #define LPI2C_SIER	0x118	/* i2c target interrupt enable */
50 #define LPI2C_SDER	0x11C	/* i2c target DMA enable */
51 #define LPI2C_SCFGR0	0x120	/* i2c target configuration */
52 #define LPI2C_SCFGR1	0x124	/* i2c target configuration */
53 #define LPI2C_SCFGR2	0x128	/* i2c target configuration */
54 #define LPI2C_SAMR	0x140	/* i2c target address match */
55 #define LPI2C_SASR	0x150	/* i2c target address status */
56 #define LPI2C_STAR	0x154	/* i2c target transmit ACK */
57 #define LPI2C_STDR	0x160	/* i2c target transmit data */
58 #define LPI2C_SRDR	0x170	/* i2c target receive data */
59 #define LPI2C_SRDROR	0x178	/* i2c target receive data read only */
60 
61 /* i2c command */
62 #define TRAN_DATA	0X00
63 #define RECV_DATA	0X01
64 #define GEN_STOP	0X02
65 #define RECV_DISCARD	0X03
66 #define GEN_START	0X04
67 #define START_NACK	0X05
68 #define START_HIGH	0X06
69 #define START_HIGH_NACK	0X07
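/*
 * Note (derived from the MTDR description before lpi2c_imx_dma_xfer() below):
 * an MTDR write combines one of the commands above in bits 10:8 with a data
 * byte in bits 7:0. For example, lpi2c_imx_start() writes
 * (GEN_START << 8) | <8-bit address> to issue a (repeated) START followed by
 * the address byte.
 */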
70 
71 #define MCR_MEN		BIT(0)
72 #define MCR_RST		BIT(1)
73 #define MCR_DOZEN	BIT(2)
74 #define MCR_DBGEN	BIT(3)
75 #define MCR_RTF		BIT(8)
76 #define MCR_RRF		BIT(9)
77 #define MSR_TDF		BIT(0)
78 #define MSR_RDF		BIT(1)
79 #define MSR_SDF		BIT(9)
80 #define MSR_NDF		BIT(10)
81 #define MSR_ALF		BIT(11)
82 #define MSR_MBF		BIT(24)
83 #define MSR_BBF		BIT(25)
84 #define MIER_TDIE	BIT(0)
85 #define MIER_RDIE	BIT(1)
86 #define MIER_SDIE	BIT(9)
87 #define MIER_NDIE	BIT(10)
88 #define MCFGR1_AUTOSTOP	BIT(8)
89 #define MCFGR1_IGNACK	BIT(9)
90 #define MRDR_RXEMPTY	BIT(14)
91 #define MDER_TDDE	BIT(0)
92 #define MDER_RDDE	BIT(1)
93 
94 #define SCR_SEN		BIT(0)
95 #define SCR_RST		BIT(1)
96 #define SCR_FILTEN	BIT(4)
97 #define SCR_RTF		BIT(8)
98 #define SCR_RRF		BIT(9)
99 #define SSR_TDF		BIT(0)
100 #define SSR_RDF		BIT(1)
101 #define SSR_AVF		BIT(2)
102 #define SSR_TAF		BIT(3)
103 #define SSR_RSF		BIT(8)
104 #define SSR_SDF		BIT(9)
105 #define SSR_BEF		BIT(10)
106 #define SSR_FEF		BIT(11)
107 #define SSR_SBF		BIT(24)
108 #define SSR_BBF		BIT(25)
109 #define SSR_CLEAR_BITS	(SSR_RSF | SSR_SDF | SSR_BEF | SSR_FEF)
110 #define SIER_TDIE	BIT(0)
111 #define SIER_RDIE	BIT(1)
112 #define SIER_AVIE	BIT(2)
113 #define SIER_TAIE	BIT(3)
114 #define SIER_RSIE	BIT(8)
115 #define SIER_SDIE	BIT(9)
116 #define SIER_BEIE	BIT(10)
117 #define SIER_FEIE	BIT(11)
118 #define SIER_AM0F	BIT(12)
119 #define SCFGR1_RXSTALL	BIT(1)
120 #define SCFGR1_TXDSTALL	BIT(2)
121 #define SCFGR2_FILTSDA_SHIFT	24
122 #define SCFGR2_FILTSCL_SHIFT	16
123 #define SCFGR2_CLKHOLD(x)	(x)
124 #define SCFGR2_FILTSDA(x)	((x) << SCFGR2_FILTSDA_SHIFT)
125 #define SCFGR2_FILTSCL(x)	((x) << SCFGR2_FILTSCL_SHIFT)
126 #define SASR_READ_REQ	0x1
127 #define SLAVE_INT_FLAG	(SIER_TDIE | SIER_RDIE | SIER_AVIE | \
128 			 SIER_SDIE | SIER_BEIE)
129 
130 #define I2C_CLK_RATIO	2
131 #define CHUNK_DATA	256
132 
133 #define I2C_PM_TIMEOUT		10 /* ms */
134 #define I2C_DMA_THRESHOLD	8 /* bytes */
135 
136 enum lpi2c_imx_mode {
137 	STANDARD,	/* 100+Kbps */
138 	FAST,		/* 400+Kbps */
139 	FAST_PLUS,	/* 1.0+Mbps */
140 	HS,		/* 3.4+Mbps */
141 	ULTRA_FAST,	/* 5.0+Mbps */
142 };
143 
144 enum lpi2c_imx_pincfg {
145 	TWO_PIN_OD,
146 	TWO_PIN_OO,
147 	TWO_PIN_PP,
148 	FOUR_PIN_PP,
149 };
150 
151 struct lpi2c_imx_dma {
152 	bool		using_pio_mode;
153 	u8		rx_cmd_buf_len;
154 	u8		*dma_buf;
155 	u16		*rx_cmd_buf;
156 	unsigned int	dma_len;
157 	unsigned int	tx_burst_num;
158 	unsigned int	rx_burst_num;
159 	unsigned long	dma_msg_flag;
160 	resource_size_t	phy_addr;
161 	dma_addr_t	dma_tx_addr;
162 	dma_addr_t	dma_addr;
163 	enum dma_data_direction dma_data_dir;
164 	enum dma_transfer_direction dma_transfer_dir;
165 	struct dma_chan	*chan_tx;
166 	struct dma_chan	*chan_rx;
167 };
168 
169 struct lpi2c_imx_struct {
170 	struct i2c_adapter	adapter;
171 	int			num_clks;
172 	struct clk_bulk_data	*clks;
173 	void __iomem		*base;
174 	__u8			*rx_buf;
175 	__u8			*tx_buf;
176 	struct completion	complete;
177 	unsigned long		rate_per;
178 	unsigned int		msglen;
179 	unsigned int		delivered;
180 	unsigned int		block_data;
181 	unsigned int		bitrate;
182 	unsigned int		txfifosize;
183 	unsigned int		rxfifosize;
184 	enum lpi2c_imx_mode	mode;
185 	struct i2c_bus_recovery_info rinfo;
186 	bool			can_use_dma;
187 	struct lpi2c_imx_dma	*dma;
188 	struct i2c_client	*target;
189 };
190 
191 #define lpi2c_imx_read_msr_poll_timeout(atomic, val, cond)                    \
192 	(atomic ? readl_poll_timeout_atomic(lpi2c_imx->base + LPI2C_MSR, val, \
193 					    cond, 0, 500000) :                \
194 		  readl_poll_timeout(lpi2c_imx->base + LPI2C_MSR, val, cond,  \
195 				     0, 500000))
196 
197 static void lpi2c_imx_intctrl(struct lpi2c_imx_struct *lpi2c_imx,
198 			      unsigned int enable)
199 {
200 	writel(enable, lpi2c_imx->base + LPI2C_MIER);
201 }
202 
203 static int lpi2c_imx_bus_busy(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
204 {
205 	unsigned int temp;
206 	int err;
207 
208 	err = lpi2c_imx_read_msr_poll_timeout(atomic, temp,
209 					      temp & (MSR_ALF | MSR_BBF | MSR_MBF));
210 
211 	/* check for arbitration lost, clear if set */
212 	if (temp & MSR_ALF) {
213 		writel(temp, lpi2c_imx->base + LPI2C_MSR);
214 		return -EAGAIN;
215 	}
216 
217 	/* check for bus not busy */
218 	if (err) {
219 		dev_dbg(&lpi2c_imx->adapter.dev, "bus not working\n");
220 		if (lpi2c_imx->adapter.bus_recovery_info)
221 			i2c_recover_bus(&lpi2c_imx->adapter);
222 		return -ETIMEDOUT;
223 	}
224 
225 	return 0;
226 }
227 
228 static u32 lpi2c_imx_txfifo_cnt(struct lpi2c_imx_struct *lpi2c_imx)
229 {
230 	return readl(lpi2c_imx->base + LPI2C_MFSR) & 0xff;
231 }
232 
233 static void lpi2c_imx_set_mode(struct lpi2c_imx_struct *lpi2c_imx)
234 {
235 	unsigned int bitrate = lpi2c_imx->bitrate;
236 	enum lpi2c_imx_mode mode;
237 
238 	if (bitrate < I2C_MAX_FAST_MODE_FREQ)
239 		mode = STANDARD;
240 	else if (bitrate < I2C_MAX_FAST_MODE_PLUS_FREQ)
241 		mode = FAST;
242 	else if (bitrate < I2C_MAX_HIGH_SPEED_MODE_FREQ)
243 		mode = FAST_PLUS;
244 	else if (bitrate < I2C_MAX_ULTRA_FAST_MODE_FREQ)
245 		mode = HS;
246 	else
247 		mode = ULTRA_FAST;
248 
249 	lpi2c_imx->mode = mode;
250 }
251 
252 static int lpi2c_imx_start(struct lpi2c_imx_struct *lpi2c_imx,
253 			   struct i2c_msg *msgs, bool atomic)
254 {
255 	unsigned int temp;
256 
257 	temp = readl(lpi2c_imx->base + LPI2C_MCR);
258 	temp |= MCR_RRF | MCR_RTF;
259 	writel(temp, lpi2c_imx->base + LPI2C_MCR);
260 	writel(0x7f00, lpi2c_imx->base + LPI2C_MSR);
261 
262 	temp = i2c_8bit_addr_from_msg(msgs) | (GEN_START << 8);
263 	writel(temp, lpi2c_imx->base + LPI2C_MTDR);
264 
265 	return lpi2c_imx_bus_busy(lpi2c_imx, atomic);
266 }
267 
268 static void lpi2c_imx_stop(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
269 {
270 	unsigned int temp;
271 	int err;
272 
273 	writel(GEN_STOP << 8, lpi2c_imx->base + LPI2C_MTDR);
274 
275 	err = lpi2c_imx_read_msr_poll_timeout(atomic, temp, temp & MSR_SDF);
276 
277 	if (err) {
278 		dev_dbg(&lpi2c_imx->adapter.dev, "stop timeout\n");
279 		if (lpi2c_imx->adapter.bus_recovery_info)
280 			i2c_recover_bus(&lpi2c_imx->adapter);
281 	}
282 }
283 
284 /* CLKLO = I2C_CLK_RATIO * CLKHI, SETHOLD = CLKHI, DATAVD = CLKHI/2 */
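/*
 * Illustrative numbers (an assumed 24 MHz functional clock and a 400 kHz bus,
 * not values taken from any specific board): lpi2c_imx_config() below selects
 * FAST mode (filt = 2) and, at prescale = 0, computes
 *   clk_cycle = 24000000 / 400000 - 3 - 1 = 56
 *   clkhi     = DIV_ROUND_UP(56, I2C_CLK_RATIO + 1) = 19
 *   clklo     = 56 - 19 = 37  (clklo < 64, so prescale = 0 is kept)
 *   sethold   = 19, datavd = 9
 */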
285 static int lpi2c_imx_config(struct lpi2c_imx_struct *lpi2c_imx)
286 {
287 	u8 prescale, filt, sethold, datavd;
288 	unsigned int clk_rate, clk_cycle, clkhi, clklo;
289 	enum lpi2c_imx_pincfg pincfg;
290 	unsigned int temp;
291 
292 	lpi2c_imx_set_mode(lpi2c_imx);
293 
294 	clk_rate = lpi2c_imx->rate_per;
295 
296 	if (lpi2c_imx->mode == HS || lpi2c_imx->mode == ULTRA_FAST)
297 		filt = 0;
298 	else
299 		filt = 2;
300 
301 	for (prescale = 0; prescale <= 7; prescale++) {
302 		clk_cycle = clk_rate / ((1 << prescale) * lpi2c_imx->bitrate)
303 			    - 3 - (filt >> 1);
304 		clkhi = DIV_ROUND_UP(clk_cycle, I2C_CLK_RATIO + 1);
305 		clklo = clk_cycle - clkhi;
306 		if (clklo < 64)
307 			break;
308 	}
309 
310 	if (prescale > 7)
311 		return -EINVAL;
312 
313 	/* set MCFGR1: PINCFG, PRESCALE, IGNACK */
314 	if (lpi2c_imx->mode == ULTRA_FAST)
315 		pincfg = TWO_PIN_OO;
316 	else
317 		pincfg = TWO_PIN_OD;
318 	temp = prescale | pincfg << 24;
319 
320 	if (lpi2c_imx->mode == ULTRA_FAST)
321 		temp |= MCFGR1_IGNACK;
322 
323 	writel(temp, lpi2c_imx->base + LPI2C_MCFGR1);
324 
325 	/* set MCFGR2: FILTSDA, FILTSCL */
326 	temp = (filt << 16) | (filt << 24);
327 	writel(temp, lpi2c_imx->base + LPI2C_MCFGR2);
328 
329 	/* set MCCR: DATAVD, SETHOLD, CLKHI, CLKLO */
330 	sethold = clkhi;
331 	datavd = clkhi >> 1;
332 	temp = datavd << 24 | sethold << 16 | clkhi << 8 | clklo;
333 
334 	if (lpi2c_imx->mode == HS)
335 		writel(temp, lpi2c_imx->base + LPI2C_MCCR1);
336 	else
337 		writel(temp, lpi2c_imx->base + LPI2C_MCCR0);
338 
339 	return 0;
340 }
341 
342 static int lpi2c_imx_master_enable(struct lpi2c_imx_struct *lpi2c_imx)
343 {
344 	unsigned int temp;
345 	int ret;
346 
347 	ret = pm_runtime_resume_and_get(lpi2c_imx->adapter.dev.parent);
348 	if (ret < 0)
349 		return ret;
350 
351 	temp = MCR_RST;
352 	writel(temp, lpi2c_imx->base + LPI2C_MCR);
353 	writel(0, lpi2c_imx->base + LPI2C_MCR);
354 
355 	ret = lpi2c_imx_config(lpi2c_imx);
356 	if (ret)
357 		goto rpm_put;
358 
359 	temp = readl(lpi2c_imx->base + LPI2C_MCR);
360 	temp |= MCR_MEN;
361 	writel(temp, lpi2c_imx->base + LPI2C_MCR);
362 
363 	return 0;
364 
365 rpm_put:
366 	pm_runtime_mark_last_busy(lpi2c_imx->adapter.dev.parent);
367 	pm_runtime_put_autosuspend(lpi2c_imx->adapter.dev.parent);
368 
369 	return ret;
370 }
371 
372 static int lpi2c_imx_master_disable(struct lpi2c_imx_struct *lpi2c_imx)
373 {
374 	u32 temp;
375 
376 	temp = readl(lpi2c_imx->base + LPI2C_MCR);
377 	temp &= ~MCR_MEN;
378 	writel(temp, lpi2c_imx->base + LPI2C_MCR);
379 
380 	pm_runtime_mark_last_busy(lpi2c_imx->adapter.dev.parent);
381 	pm_runtime_put_autosuspend(lpi2c_imx->adapter.dev.parent);
382 
383 	return 0;
384 }
385 
386 static int lpi2c_imx_pio_msg_complete(struct lpi2c_imx_struct *lpi2c_imx)
387 {
388 	unsigned long time_left;
389 
390 	time_left = wait_for_completion_timeout(&lpi2c_imx->complete, HZ);
391 
392 	return time_left ? 0 : -ETIMEDOUT;
393 }
394 
395 static int lpi2c_imx_txfifo_empty(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
396 {
397 	unsigned int temp;
398 	int err;
399 
400 	err = lpi2c_imx_read_msr_poll_timeout(atomic, temp,
401 					      (temp & MSR_NDF) || !lpi2c_imx_txfifo_cnt(lpi2c_imx));
402 
403 	if (temp & MSR_NDF) {
404 		dev_dbg(&lpi2c_imx->adapter.dev, "NDF detected\n");
405 		return -EIO;
406 	}
407 
408 	if (err) {
409 		dev_dbg(&lpi2c_imx->adapter.dev, "txfifo empty timeout\n");
410 		if (lpi2c_imx->adapter.bus_recovery_info)
411 			i2c_recover_bus(&lpi2c_imx->adapter);
412 		return -ETIMEDOUT;
413 	}
414 
415 	return 0;
416 }
417 
418 static void lpi2c_imx_set_tx_watermark(struct lpi2c_imx_struct *lpi2c_imx)
419 {
420 	writel(lpi2c_imx->txfifosize >> 1, lpi2c_imx->base + LPI2C_MFCR);
421 }
422 
423 static void lpi2c_imx_set_rx_watermark(struct lpi2c_imx_struct *lpi2c_imx)
424 {
425 	unsigned int temp, remaining;
426 
427 	remaining = lpi2c_imx->msglen - lpi2c_imx->delivered;
428 
429 	if (remaining > (lpi2c_imx->rxfifosize >> 1))
430 		temp = lpi2c_imx->rxfifosize >> 1;
431 	else
432 		temp = 0;
433 
434 	writel(temp << 16, lpi2c_imx->base + LPI2C_MFCR);
435 }
436 
437 static bool lpi2c_imx_write_txfifo(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
438 {
439 	unsigned int data, txcnt;
440 
441 	txcnt = readl(lpi2c_imx->base + LPI2C_MFSR) & 0xff;
442 
443 	while (txcnt < lpi2c_imx->txfifosize) {
444 		if (lpi2c_imx->delivered == lpi2c_imx->msglen)
445 			break;
446 
447 		data = lpi2c_imx->tx_buf[lpi2c_imx->delivered++];
448 		writel(data, lpi2c_imx->base + LPI2C_MTDR);
449 		txcnt++;
450 	}
451 
452 	if (lpi2c_imx->delivered < lpi2c_imx->msglen) {
453 		if (!atomic)
454 			lpi2c_imx_intctrl(lpi2c_imx, MIER_TDIE | MIER_NDIE);
455 		return false;
456 	}
457 
458 	if (!atomic)
459 		complete(&lpi2c_imx->complete);
460 
461 	return true;
462 }
463 
464 static bool lpi2c_imx_read_rxfifo(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
465 {
466 	unsigned int blocklen, remaining;
467 	unsigned int temp, data;
468 
469 	do {
470 		data = readl(lpi2c_imx->base + LPI2C_MRDR);
471 		if (data & MRDR_RXEMPTY)
472 			break;
473 
474 		lpi2c_imx->rx_buf[lpi2c_imx->delivered++] = data & 0xff;
475 	} while (1);
476 
477 	/*
478 	 * The first byte is the length of the remaining packet in an SMBus
479 	 * block data read. Add it to msgs->len.
480 	 */
481 	if (lpi2c_imx->block_data) {
482 		blocklen = lpi2c_imx->rx_buf[0];
483 		lpi2c_imx->msglen += blocklen;
484 	}
485 
486 	remaining = lpi2c_imx->msglen - lpi2c_imx->delivered;
487 
488 	if (!remaining) {
489 		if (!atomic)
490 			complete(&lpi2c_imx->complete);
491 		return true;
492 	}
493 
494 	/* not finished, still waiting for rx data */
495 	lpi2c_imx_set_rx_watermark(lpi2c_imx);
496 
497 	/* multiple receive commands */
498 	if (lpi2c_imx->block_data) {
499 		lpi2c_imx->block_data = 0;
500 		temp = remaining;
501 		temp |= (RECV_DATA << 8);
502 		writel(temp, lpi2c_imx->base + LPI2C_MTDR);
503 	} else if (!(lpi2c_imx->delivered & 0xff)) {
504 		temp = (remaining > CHUNK_DATA ? CHUNK_DATA : remaining) - 1;
505 		temp |= (RECV_DATA << 8);
506 		writel(temp, lpi2c_imx->base + LPI2C_MTDR);
507 	}
508 
509 	if (!atomic)
510 		lpi2c_imx_intctrl(lpi2c_imx, MIER_RDIE);
511 
512 	return false;
513 }
514 
515 static void lpi2c_imx_write(struct lpi2c_imx_struct *lpi2c_imx,
516 			    struct i2c_msg *msgs)
517 {
518 	lpi2c_imx->tx_buf = msgs->buf;
519 	lpi2c_imx_set_tx_watermark(lpi2c_imx);
520 	lpi2c_imx_write_txfifo(lpi2c_imx, false);
521 }
522 
523 static int lpi2c_imx_write_atomic(struct lpi2c_imx_struct *lpi2c_imx,
524 				  struct i2c_msg *msgs)
525 {
526 	u32 temp;
527 	int err;
528 
529 	lpi2c_imx->tx_buf = msgs->buf;
530 
531 	err = lpi2c_imx_read_msr_poll_timeout(true, temp,
532 					      (temp & MSR_NDF) ||
533 					      lpi2c_imx_write_txfifo(lpi2c_imx, true));
534 
535 	if (temp & MSR_NDF)
536 		return -EIO;
537 
538 	return err;
539 }
540 
541 static void lpi2c_imx_read_init(struct lpi2c_imx_struct *lpi2c_imx,
542 				struct i2c_msg *msgs)
543 {
544 	unsigned int temp;
545 
546 	lpi2c_imx->rx_buf = msgs->buf;
547 	lpi2c_imx->block_data = msgs->flags & I2C_M_RECV_LEN;
548 
549 	lpi2c_imx_set_rx_watermark(lpi2c_imx);
550 	temp = msgs->len > CHUNK_DATA ? CHUNK_DATA - 1 : msgs->len - 1;
551 	temp |= (RECV_DATA << 8);
552 	writel(temp, lpi2c_imx->base + LPI2C_MTDR);
553 }
554 
555 static bool lpi2c_imx_read_chunk_atomic(struct lpi2c_imx_struct *lpi2c_imx)
556 {
557 	u32 rxcnt;
558 
559 	rxcnt = (readl(lpi2c_imx->base + LPI2C_MFSR) >> 16) & 0xFF;
560 	if (!rxcnt)
561 		return false;
562 
563 	if (!lpi2c_imx_read_rxfifo(lpi2c_imx, true))
564 		return false;
565 
566 	return true;
567 }
568 
569 static int lpi2c_imx_read_atomic(struct lpi2c_imx_struct *lpi2c_imx,
570 				 struct i2c_msg *msgs)
571 {
572 	u32 temp;
573 	int tmo_us;
574 
575 	tmo_us = 1000000;
576 	do {
577 		if (lpi2c_imx_read_chunk_atomic(lpi2c_imx))
578 			return 0;
579 
580 		temp = readl(lpi2c_imx->base + LPI2C_MSR);
581 
582 		if (temp & MSR_NDF)
583 			return -EIO;
584 
585 		udelay(100);
586 		tmo_us -= 100;
587 	} while (tmo_us > 0);
588 
589 	return -ETIMEDOUT;
590 }
591 
592 static bool is_use_dma(struct lpi2c_imx_struct *lpi2c_imx, struct i2c_msg *msg)
593 {
594 	if (!lpi2c_imx->can_use_dma)
595 		return false;
596 
597 	/*
598 	 * When the data length is less than I2C_DMA_THRESHOLD, CPU (PIO)
599 	 * mode is used directly to avoid the DMA setup overhead.
600 	 */
601 	return !(msg->len < I2C_DMA_THRESHOLD);
602 }
603 
604 static int lpi2c_imx_pio_xfer(struct lpi2c_imx_struct *lpi2c_imx,
605 			      struct i2c_msg *msg)
606 {
607 	reinit_completion(&lpi2c_imx->complete);
608 
609 	if (msg->flags & I2C_M_RD) {
610 		lpi2c_imx_read_init(lpi2c_imx, msg);
611 		lpi2c_imx_intctrl(lpi2c_imx, MIER_RDIE | MIER_NDIE);
612 	} else {
613 		lpi2c_imx_write(lpi2c_imx, msg);
614 	}
615 
616 	return lpi2c_imx_pio_msg_complete(lpi2c_imx);
617 }
618 
619 static int lpi2c_imx_pio_xfer_atomic(struct lpi2c_imx_struct *lpi2c_imx,
620 				     struct i2c_msg *msg)
621 {
622 	if (msg->flags & I2C_M_RD) {
623 		lpi2c_imx_read_init(lpi2c_imx, msg);
624 		return lpi2c_imx_read_atomic(lpi2c_imx, msg);
625 	}
626 
627 	return lpi2c_imx_write_atomic(lpi2c_imx, msg);
628 }
629 
630 static int lpi2c_imx_dma_timeout_calculate(struct lpi2c_imx_struct *lpi2c_imx)
631 {
632 	unsigned long time = 0;
633 
634 	time = 8 * lpi2c_imx->dma->dma_len * 1000 / lpi2c_imx->bitrate;
635 
636 	/* Add extra second for scheduler related activities */
637 	time += 1;
638 
639 	/* Convert to jiffies */
640 	return secs_to_jiffies(time);
641 }
642 
643 static int lpi2c_imx_alloc_rx_cmd_buf(struct lpi2c_imx_struct *lpi2c_imx)
644 {
645 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
646 	u16 rx_remain = dma->dma_len;
647 	int cmd_num;
648 	u16 temp;
649 
650 	/*
651 	 * Calculate the number of rx command words via the DMA TX channel
652 	 * writing into command register based on the i2c msg len, and build
653 	 * the rx command words buffer.
654 	 */
655 	cmd_num = DIV_ROUND_UP(rx_remain, CHUNK_DATA);
656 	dma->rx_cmd_buf = kcalloc(cmd_num, sizeof(u16), GFP_KERNEL);
657 	dma->rx_cmd_buf_len = cmd_num * sizeof(u16);
658 
659 	if (!dma->rx_cmd_buf) {
660 		dev_err(&lpi2c_imx->adapter.dev, "Alloc RX cmd buffer failed\n");
661 		return -ENOMEM;
662 	}
663 
664 	for (int i = 0; i < cmd_num ; i++) {
665 		temp = rx_remain > CHUNK_DATA ? CHUNK_DATA - 1 : rx_remain - 1;
666 		temp |= (RECV_DATA << 8);
667 		rx_remain -= CHUNK_DATA;
668 		dma->rx_cmd_buf[i] = temp;
669 	}
670 
671 	return 0;
672 }
673 
674 static int lpi2c_imx_dma_msg_complete(struct lpi2c_imx_struct *lpi2c_imx)
675 {
676 	unsigned long time_left, time;
677 
678 	time = lpi2c_imx_dma_timeout_calculate(lpi2c_imx);
679 	time_left = wait_for_completion_timeout(&lpi2c_imx->complete, time);
680 	if (time_left == 0) {
681 		dev_err(&lpi2c_imx->adapter.dev, "I/O Error in DMA Data Transfer\n");
682 		return -ETIMEDOUT;
683 	}
684 
685 	return 0;
686 }
687 
688 static void lpi2c_dma_unmap(struct lpi2c_imx_dma *dma)
689 {
690 	struct dma_chan *chan = dma->dma_data_dir == DMA_FROM_DEVICE
691 				? dma->chan_rx : dma->chan_tx;
692 
693 	dma_unmap_single(chan->device->dev, dma->dma_addr,
694 			 dma->dma_len, dma->dma_data_dir);
695 
696 	dma->dma_data_dir = DMA_NONE;
697 }
698 
699 static void lpi2c_cleanup_rx_cmd_dma(struct lpi2c_imx_dma *dma)
700 {
701 	dmaengine_terminate_sync(dma->chan_tx);
702 	dma_unmap_single(dma->chan_tx->device->dev, dma->dma_tx_addr,
703 			 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
704 }
705 
706 static void lpi2c_cleanup_dma(struct lpi2c_imx_dma *dma)
707 {
708 	if (dma->dma_data_dir == DMA_FROM_DEVICE)
709 		dmaengine_terminate_sync(dma->chan_rx);
710 	else if (dma->dma_data_dir == DMA_TO_DEVICE)
711 		dmaengine_terminate_sync(dma->chan_tx);
712 
713 	lpi2c_dma_unmap(dma);
714 }
715 
716 static void lpi2c_dma_callback(void *data)
717 {
718 	struct lpi2c_imx_struct *lpi2c_imx = (struct lpi2c_imx_struct *)data;
719 
720 	complete(&lpi2c_imx->complete);
721 }
722 
723 static int lpi2c_dma_rx_cmd_submit(struct lpi2c_imx_struct *lpi2c_imx)
724 {
725 	struct dma_async_tx_descriptor *rx_cmd_desc;
726 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
727 	struct dma_chan *txchan = dma->chan_tx;
728 	dma_cookie_t cookie;
729 
730 	dma->dma_tx_addr = dma_map_single(txchan->device->dev,
731 					  dma->rx_cmd_buf, dma->rx_cmd_buf_len,
732 					  DMA_TO_DEVICE);
733 	if (dma_mapping_error(txchan->device->dev, dma->dma_tx_addr)) {
734 		dev_err(&lpi2c_imx->adapter.dev, "DMA map failed, use pio\n");
735 		return -EINVAL;
736 	}
737 
738 	rx_cmd_desc = dmaengine_prep_slave_single(txchan, dma->dma_tx_addr,
739 						  dma->rx_cmd_buf_len, DMA_MEM_TO_DEV,
740 						  DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
741 	if (!rx_cmd_desc) {
742 		dev_err(&lpi2c_imx->adapter.dev, "DMA prep slave sg failed, use pio\n");
743 		goto desc_prepare_err_exit;
744 	}
745 
746 	cookie = dmaengine_submit(rx_cmd_desc);
747 	if (dma_submit_error(cookie)) {
748 		dev_err(&lpi2c_imx->adapter.dev, "submitting DMA failed, use pio\n");
749 		goto submit_err_exit;
750 	}
751 
752 	dma_async_issue_pending(txchan);
753 
754 	return 0;
755 
756 desc_prepare_err_exit:
757 	dma_unmap_single(txchan->device->dev, dma->dma_tx_addr,
758 			 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
759 	return -EINVAL;
760 
761 submit_err_exit:
762 	dma_unmap_single(txchan->device->dev, dma->dma_tx_addr,
763 			 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
764 	dmaengine_desc_free(rx_cmd_desc);
765 	return -EINVAL;
766 }
767 
768 static int lpi2c_dma_submit(struct lpi2c_imx_struct *lpi2c_imx)
769 {
770 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
771 	struct dma_async_tx_descriptor *desc;
772 	struct dma_chan *chan;
773 	dma_cookie_t cookie;
774 
775 	if (dma->dma_msg_flag & I2C_M_RD) {
776 		chan = dma->chan_rx;
777 		dma->dma_data_dir = DMA_FROM_DEVICE;
778 		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
779 	} else {
780 		chan = dma->chan_tx;
781 		dma->dma_data_dir = DMA_TO_DEVICE;
782 		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
783 	}
784 
785 	dma->dma_addr = dma_map_single(chan->device->dev,
786 				       dma->dma_buf, dma->dma_len, dma->dma_data_dir);
787 	if (dma_mapping_error(chan->device->dev, dma->dma_addr)) {
788 		dev_err(&lpi2c_imx->adapter.dev, "DMA map failed, use pio\n");
789 		return -EINVAL;
790 	}
791 
792 	desc = dmaengine_prep_slave_single(chan, dma->dma_addr,
793 					   dma->dma_len, dma->dma_transfer_dir,
794 					   DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
795 	if (!desc) {
796 		dev_err(&lpi2c_imx->adapter.dev, "DMA prep slave sg failed, use pio\n");
797 		goto desc_prepare_err_exit;
798 	}
799 
800 	reinit_completion(&lpi2c_imx->complete);
801 	desc->callback = lpi2c_dma_callback;
802 	desc->callback_param = lpi2c_imx;
803 
804 	cookie = dmaengine_submit(desc);
805 	if (dma_submit_error(cookie)) {
806 		dev_err(&lpi2c_imx->adapter.dev, "submitting DMA failed, use pio\n");
807 		goto submit_err_exit;
808 	}
809 
810 	/* Can't switch to PIO mode once the DMA transfer has started */
811 	dma->using_pio_mode = false;
812 
813 	dma_async_issue_pending(chan);
814 
815 	return 0;
816 
817 desc_prepare_err_exit:
818 	lpi2c_dma_unmap(dma);
819 	return -EINVAL;
820 
821 submit_err_exit:
822 	lpi2c_dma_unmap(dma);
823 	dmaengine_desc_free(desc);
824 	return -EINVAL;
825 }
826 
827 static int lpi2c_imx_find_max_burst_num(unsigned int fifosize, unsigned int len)
828 {
829 	unsigned int i;
830 
831 	for (i = fifosize / 2; i > 0; i--)
832 		if (!(len % i))
833 			break;
834 
835 	return i;
836 }
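/*
 * Example with hypothetical values: for a 16-word FIFO and a 300-byte message,
 * lpi2c_imx_find_max_burst_num(16, 300) starts at 16 / 2 = 8 and returns the
 * largest burst size that divides the length evenly:
 * 300 % 8 = 4, 300 % 7 = 6, 300 % 6 = 0, so the burst number is 6.
 */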
837 
838 /*
839  * For the highest DMA efficiency, the tx/rx burst number should be calculated
840  * according to the FIFO depth.
841  */
842 static void lpi2c_imx_dma_burst_num_calculate(struct lpi2c_imx_struct *lpi2c_imx)
843 {
844 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
845 	unsigned int cmd_num;
846 
847 	if (dma->dma_msg_flag & I2C_M_RD) {
848 		/*
849 		 * One RX cmd word can trigger DMA to receive no more than 256 bytes.
850 		 * The number of RX cmd words should be calculated based on the data
851 		 * length.
852 		 */
853 		cmd_num = DIV_ROUND_UP(dma->dma_len, CHUNK_DATA);
854 		dma->tx_burst_num = lpi2c_imx_find_max_burst_num(lpi2c_imx->txfifosize,
855 								 cmd_num);
856 		dma->rx_burst_num = lpi2c_imx_find_max_burst_num(lpi2c_imx->rxfifosize,
857 								 dma->dma_len);
858 	} else {
859 		dma->tx_burst_num = lpi2c_imx_find_max_burst_num(lpi2c_imx->txfifosize,
860 								 dma->dma_len);
861 	}
862 }
863 
864 static int lpi2c_dma_config(struct lpi2c_imx_struct *lpi2c_imx)
865 {
866 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
867 	struct dma_slave_config rx = {}, tx = {};
868 	int ret;
869 
870 	lpi2c_imx_dma_burst_num_calculate(lpi2c_imx);
871 
872 	if (dma->dma_msg_flag & I2C_M_RD) {
873 		tx.dst_addr = dma->phy_addr + LPI2C_MTDR;
874 		tx.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
875 		tx.dst_maxburst = dma->tx_burst_num;
876 		tx.direction = DMA_MEM_TO_DEV;
877 		ret = dmaengine_slave_config(dma->chan_tx, &tx);
878 		if (ret < 0)
879 			return ret;
880 
881 		rx.src_addr = dma->phy_addr + LPI2C_MRDR;
882 		rx.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
883 		rx.src_maxburst = dma->rx_burst_num;
884 		rx.direction = DMA_DEV_TO_MEM;
885 		ret = dmaengine_slave_config(dma->chan_rx, &rx);
886 		if (ret < 0)
887 			return ret;
888 	} else {
889 		tx.dst_addr = dma->phy_addr + LPI2C_MTDR;
890 		tx.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
891 		tx.dst_maxburst = dma->tx_burst_num;
892 		tx.direction = DMA_MEM_TO_DEV;
893 		ret = dmaengine_slave_config(dma->chan_tx, &tx);
894 		if (ret < 0)
895 			return ret;
896 	}
897 
898 	return 0;
899 }
900 
901 static void lpi2c_dma_enable(struct lpi2c_imx_struct *lpi2c_imx)
902 {
903 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
904 	/*
905 	 * TX interrupt will be triggered when the number of words in
906 	 * the transmit FIFO is equal to or less than the TX watermark.
907 	 * RX interrupt will be triggered when the number of words in
908 	 * the receive FIFO is greater than RX watermark.
909 	 * In order to trigger the DMA interrupt, TX watermark should be
910 	 * set equal to the DMA TX burst number but RX watermark should
911 	 * be set less than the DMA RX burst number.
912 	 */
913 	if (dma->dma_msg_flag & I2C_M_RD) {
914 		/* Set I2C TX/RX watermark */
915 		writel(dma->tx_burst_num | (dma->rx_burst_num - 1) << 16,
916 		       lpi2c_imx->base + LPI2C_MFCR);
917 		/* Enable I2C DMA TX/RX function */
918 		writel(MDER_TDDE | MDER_RDDE, lpi2c_imx->base + LPI2C_MDER);
919 	} else {
920 		/* Set I2C TX watermark */
921 		writel(dma->tx_burst_num, lpi2c_imx->base + LPI2C_MFCR);
922 		/* Enable I2C DMA TX function */
923 		writel(MDER_TDDE, lpi2c_imx->base + LPI2C_MDER);
924 	}
925 
926 	/* Enable NACK detected */
927 	lpi2c_imx_intctrl(lpi2c_imx, MIER_NDIE);
928 }
929 
930 /*
931  * When lpi2c is in TX DMA mode we can use one DMA TX channel to write
932  * data word into TXFIFO, but in RX DMA mode it is different.
933  *
934  * The LPI2C MTDR register is a command data and transmit data register.
935  * Bits 8-10 are the command data field and Bits 0-7 are the transmit
936  * data field. When the LPI2C master needs to read data, the number of
937  * bytes to read should be set in the transmit data field and RECV_DATA
938  * should be set in the command data field to receive (DATA[7:0] + 1) bytes.
939  * The recv data command word is made of RECV_DATA in the command data
940  * field and the number of bytes to read in transmit data field. When the
941  * length of data to be read exceeds 256 bytes, recv data command word
942  * needs to be written to TXFIFO multiple times.
943  *
944  * So when in RX DMA mode, the TX channel must also be configured to
945  * send RX command words and the RX command word must be set in advance
946  * before transmitting.
947  */
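/*
 * Hypothetical example: a 300-byte read needs DIV_ROUND_UP(300, CHUNK_DATA) = 2
 * command words, built by lpi2c_imx_alloc_rx_cmd_buf() as
 * (RECV_DATA << 8) | 255 (receive 256 bytes) followed by
 * (RECV_DATA << 8) | 43 (receive the remaining 44 bytes).
 */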
948 static int lpi2c_imx_dma_xfer(struct lpi2c_imx_struct *lpi2c_imx,
949 			      struct i2c_msg *msg)
950 {
951 	struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
952 	int ret;
953 
954 	/* When DMA mode fails before transferring, CPU mode can be used. */
955 	dma->using_pio_mode = true;
956 
957 	dma->dma_len = msg->len;
958 	dma->dma_msg_flag = msg->flags;
959 	dma->dma_buf = i2c_get_dma_safe_msg_buf(msg, I2C_DMA_THRESHOLD);
960 	if (!dma->dma_buf)
961 		return -ENOMEM;
962 
963 	ret = lpi2c_dma_config(lpi2c_imx);
964 	if (ret) {
965 		dev_err(&lpi2c_imx->adapter.dev, "Failed to configure DMA (%d)\n", ret);
966 		goto disable_dma;
967 	}
968 
969 	lpi2c_dma_enable(lpi2c_imx);
970 
971 	ret = lpi2c_dma_submit(lpi2c_imx);
972 	if (ret) {
973 		dev_err(&lpi2c_imx->adapter.dev, "DMA submission failed (%d)\n", ret);
974 		goto disable_dma;
975 	}
976 
977 	if (dma->dma_msg_flag & I2C_M_RD) {
978 		ret = lpi2c_imx_alloc_rx_cmd_buf(lpi2c_imx);
979 		if (ret)
980 			goto disable_cleanup_data_dma;
981 
982 		ret = lpi2c_dma_rx_cmd_submit(lpi2c_imx);
983 		if (ret)
984 			goto disable_cleanup_data_dma;
985 	}
986 
987 	ret = lpi2c_imx_dma_msg_complete(lpi2c_imx);
988 	if (ret)
989 		goto disable_cleanup_all_dma;
990 
991 	/* When encountering NACK in transfer, clean up all DMA transfers */
992 	if ((readl(lpi2c_imx->base + LPI2C_MSR) & MSR_NDF) && !ret) {
993 		ret = -EIO;
994 		goto disable_cleanup_all_dma;
995 	}
996 
997 	if (dma->dma_msg_flag & I2C_M_RD)
998 		dma_unmap_single(dma->chan_tx->device->dev, dma->dma_tx_addr,
999 				 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
1000 	lpi2c_dma_unmap(dma);
1001 
1002 	goto disable_dma;
1003 
1004 disable_cleanup_all_dma:
1005 	if (dma->dma_msg_flag & I2C_M_RD)
1006 		lpi2c_cleanup_rx_cmd_dma(dma);
1007 disable_cleanup_data_dma:
1008 	lpi2c_cleanup_dma(dma);
1009 disable_dma:
1010 	/* Disable I2C DMA function */
1011 	writel(0, lpi2c_imx->base + LPI2C_MDER);
1012 
1013 	if (dma->dma_msg_flag & I2C_M_RD)
1014 		kfree(dma->rx_cmd_buf);
1015 
1016 	if (ret)
1017 		i2c_put_dma_safe_msg_buf(dma->dma_buf, msg, false);
1018 	else
1019 		i2c_put_dma_safe_msg_buf(dma->dma_buf, msg, true);
1020 
1021 	return ret;
1022 }
1023 
1024 static int lpi2c_imx_xfer_common(struct i2c_adapter *adapter,
1025 				 struct i2c_msg *msgs, int num, bool atomic)
1026 {
1027 	struct lpi2c_imx_struct *lpi2c_imx = i2c_get_adapdata(adapter);
1028 	unsigned int temp;
1029 	int i, result;
1030 
1031 	result = lpi2c_imx_master_enable(lpi2c_imx);
1032 	if (result)
1033 		return result;
1034 
1035 	for (i = 0; i < num; i++) {
1036 		result = lpi2c_imx_start(lpi2c_imx, &msgs[i], atomic);
1037 		if (result)
1038 			goto disable;
1039 
1040 		/* quick smbus */
1041 		if (num == 1 && msgs[0].len == 0)
1042 			goto stop;
1043 
1044 		lpi2c_imx->rx_buf = NULL;
1045 		lpi2c_imx->tx_buf = NULL;
1046 		lpi2c_imx->delivered = 0;
1047 		lpi2c_imx->msglen = msgs[i].len;
1048 
1049 		if (atomic) {
1050 			result = lpi2c_imx_pio_xfer_atomic(lpi2c_imx, &msgs[i]);
1051 		} else {
1052 			init_completion(&lpi2c_imx->complete);
1053 
1054 			if (is_use_dma(lpi2c_imx, &msgs[i])) {
1055 				result = lpi2c_imx_dma_xfer(lpi2c_imx, &msgs[i]);
1056 				if (result && lpi2c_imx->dma->using_pio_mode)
1057 					result = lpi2c_imx_pio_xfer(lpi2c_imx, &msgs[i]);
1058 			} else {
1059 				result = lpi2c_imx_pio_xfer(lpi2c_imx, &msgs[i]);
1060 			}
1061 		}
1062 
1063 		if (result)
1064 			goto stop;
1065 
1066 		if (!(msgs[i].flags & I2C_M_RD)) {
1067 			result = lpi2c_imx_txfifo_empty(lpi2c_imx, atomic);
1068 			if (result)
1069 				goto stop;
1070 		}
1071 	}
1072 
1073 stop:
1074 	lpi2c_imx_stop(lpi2c_imx, atomic);
1075 
1076 	temp = readl(lpi2c_imx->base + LPI2C_MSR);
1077 	if ((temp & MSR_NDF) && !result)
1078 		result = -EIO;
1079 
1080 disable:
1081 	lpi2c_imx_master_disable(lpi2c_imx);
1082 
1083 	dev_dbg(&lpi2c_imx->adapter.dev, "<%s> exit with: %s: %d\n", __func__,
1084 		(result < 0) ? "error" : "success msg",
1085 		(result < 0) ? result : num);
1086 
1087 	return (result < 0) ? result : num;
1088 }
1089 
1090 static int lpi2c_imx_xfer(struct i2c_adapter *adapter, struct i2c_msg *msgs, int num)
1091 {
1092 	return lpi2c_imx_xfer_common(adapter, msgs, num, false);
1093 }
1094 
1095 static int lpi2c_imx_xfer_atomic(struct i2c_adapter *adapter, struct i2c_msg *msgs, int num)
1096 {
1097 	return lpi2c_imx_xfer_common(adapter, msgs, num, true);
1098 }
1099 
1100 static irqreturn_t lpi2c_imx_target_isr(struct lpi2c_imx_struct *lpi2c_imx,
1101 					u32 ssr, u32 sier_filter)
1102 {
1103 	u8 value;
1104 	u32 sasr;
1105 
1106 	/* Arbitration lost */
1107 	if (sier_filter & SSR_BEF) {
1108 		writel(0, lpi2c_imx->base + LPI2C_SIER);
1109 		return IRQ_HANDLED;
1110 	}
1111 
1112 	/* Address detected */
1113 	if (sier_filter & SSR_AVF) {
1114 		sasr = readl(lpi2c_imx->base + LPI2C_SASR);
1115 		if (SASR_READ_REQ & sasr) {
1116 			/* Read request */
1117 			i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_READ_REQUESTED, &value);
1118 			writel(value, lpi2c_imx->base + LPI2C_STDR);
1119 			goto ret;
1120 		} else {
1121 			/* Write request */
1122 			i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_WRITE_REQUESTED, &value);
1123 		}
1124 	}
1125 
1126 	if (sier_filter & SSR_SDF)
1127 		/* STOP */
1128 		i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_STOP, &value);
1129 
1130 	if (sier_filter & SSR_TDF) {
1131 		/* Target send data */
1132 		i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_READ_PROCESSED, &value);
1133 		writel(value, lpi2c_imx->base + LPI2C_STDR);
1134 	}
1135 
1136 	if (sier_filter & SSR_RDF) {
1137 		/* Target receive data */
1138 		value = readl(lpi2c_imx->base + LPI2C_SRDR);
1139 		i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_WRITE_RECEIVED, &value);
1140 	}
1141 
1142 ret:
1143 	/* Clear SSR */
1144 	writel(ssr & SSR_CLEAR_BITS, lpi2c_imx->base + LPI2C_SSR);
1145 	return IRQ_HANDLED;
1146 }
1147 
1148 static irqreturn_t lpi2c_imx_master_isr(struct lpi2c_imx_struct *lpi2c_imx)
1149 {
1150 	unsigned int enabled;
1151 	unsigned int temp;
1152 
1153 	enabled = readl(lpi2c_imx->base + LPI2C_MIER);
1154 
1155 	lpi2c_imx_intctrl(lpi2c_imx, 0);
1156 	temp = readl(lpi2c_imx->base + LPI2C_MSR);
1157 	temp &= enabled;
1158 
1159 	if (temp & MSR_NDF)
1160 		complete(&lpi2c_imx->complete);
1161 	else if (temp & MSR_RDF)
1162 		lpi2c_imx_read_rxfifo(lpi2c_imx, false);
1163 	else if (temp & MSR_TDF)
1164 		lpi2c_imx_write_txfifo(lpi2c_imx, false);
1165 
1166 	return IRQ_HANDLED;
1167 }
1168 
1169 static irqreturn_t lpi2c_imx_isr(int irq, void *dev_id)
1170 {
1171 	struct lpi2c_imx_struct *lpi2c_imx = dev_id;
1172 
1173 	if (lpi2c_imx->target) {
1174 		u32 scr = readl(lpi2c_imx->base + LPI2C_SCR);
1175 		u32 ssr = readl(lpi2c_imx->base + LPI2C_SSR);
1176 		u32 sier_filter = ssr & readl(lpi2c_imx->base + LPI2C_SIER);
1177 
1178 		/*
1179 		 * The target is enabled and an interrupt has been triggered.
1180 		 * Enter the target's irq handler.
1181 		 */
1182 		if ((scr & SCR_SEN) && sier_filter)
1183 			return lpi2c_imx_target_isr(lpi2c_imx, ssr, sier_filter);
1184 	}
1185 
1186 	/*
1187 	 * Otherwise the interrupt has been triggered by the master.
1188 	 * Enter the master's irq handler.
1189 	 */
1190 	return lpi2c_imx_master_isr(lpi2c_imx);
1191 }
1192 
1193 static void lpi2c_imx_target_init(struct lpi2c_imx_struct *lpi2c_imx)
1194 {
1195 	u32 temp;
1196 
1197 	/* reset target module */
1198 	writel(SCR_RST, lpi2c_imx->base + LPI2C_SCR);
1199 	writel(0, lpi2c_imx->base + LPI2C_SCR);
1200 
1201 	/* Set target address */
1202 	writel((lpi2c_imx->target->addr << 1), lpi2c_imx->base + LPI2C_SAMR);
1203 
1204 	writel(SCFGR1_RXSTALL | SCFGR1_TXDSTALL, lpi2c_imx->base + LPI2C_SCFGR1);
1205 
1206 	/*
1207 	 * set SCFGR2: FILTSDA, FILTSCL and CLKHOLD
1208 	 *
1209 	 * FILTSCL/FILTSDA can eliminate signal skew. It should generally be
1210 	 * set to the same value and should be set >= 50ns.
1211 	 *
1212 	 * CLKHOLD is only used when clock stretching is enabled, but it will
1213 	 * extend the clock stretching to ensure there is an additional delay
1214 	 * between the target driving SDA and the target releasing the SCL pin.
1215 	 *
1216 	 * The CLKHOLD setting is crucial for the lpi2c target. When the master
1217 	 * reads data from the target, a delay caused by CPU idle, excessive
1218 	 * load, or other delays between two bytes in one message transmission
1219 	 * results in a short interval between the target driving the SDA signal
1220 	 * and releasing the SCL signal. The lpi2c master will mistakenly treat
1221 	 * this as a stop signal, resulting in an arbitration failure. This issue
1222 	 * can be avoided by setting CLKHOLD.
1223 	 *
1224 	 * In order to ensure lpi2c function normally when the lpi2c speed is as
1225 	 * low as 100kHz, CLKHOLD should be set to 3 and it is also compatible with
1226 	 * higher clock frequency like 400kHz and 1MHz.
1227 	 */
1228 	temp = SCFGR2_FILTSDA(2) | SCFGR2_FILTSCL(2) | SCFGR2_CLKHOLD(3);
1229 	writel(temp, lpi2c_imx->base + LPI2C_SCFGR2);
1230 
1231 	/*
1232 	 * Enable module:
1233 	 * SCR_FILTEN enables the digital filter and output delay counter for LPI2C
1234 	 * target mode, so SCR_FILTEN must be asserted when the SDA/SCL filters
1235 	 * and CLKHOLD are enabled.
1236 	 */
1237 	writel(SCR_SEN | SCR_FILTEN, lpi2c_imx->base + LPI2C_SCR);
1238 
1239 	/* Enable interrupt from i2c module */
1240 	writel(SLAVE_INT_FLAG, lpi2c_imx->base + LPI2C_SIER);
1241 }
1242 
1243 static int lpi2c_imx_register_target(struct i2c_client *client)
1244 {
1245 	struct lpi2c_imx_struct *lpi2c_imx = i2c_get_adapdata(client->adapter);
1246 	int ret;
1247 
1248 	if (lpi2c_imx->target)
1249 		return -EBUSY;
1250 
1251 	lpi2c_imx->target = client;
1252 
1253 	ret = pm_runtime_resume_and_get(lpi2c_imx->adapter.dev.parent);
1254 	if (ret < 0) {
1255 		dev_err(&lpi2c_imx->adapter.dev, "failed to resume i2c controller");
1256 		return ret;
1257 	}
1258 
1259 	lpi2c_imx_target_init(lpi2c_imx);
1260 
1261 	return 0;
1262 }
1263 
1264 static int lpi2c_imx_unregister_target(struct i2c_client *client)
1265 {
1266 	struct lpi2c_imx_struct *lpi2c_imx = i2c_get_adapdata(client->adapter);
1267 	int ret;
1268 
1269 	if (!lpi2c_imx->target)
1270 		return -EINVAL;
1271 
1272 	/* Reset target address. */
1273 	writel(0, lpi2c_imx->base + LPI2C_SAMR);
1274 
1275 	writel(SCR_RST, lpi2c_imx->base + LPI2C_SCR);
1276 	writel(0, lpi2c_imx->base + LPI2C_SCR);
1277 
1278 	lpi2c_imx->target = NULL;
1279 
1280 	ret = pm_runtime_put_sync(lpi2c_imx->adapter.dev.parent);
1281 	if (ret < 0)
1282 		dev_err(&lpi2c_imx->adapter.dev, "failed to suspend i2c controller");
1283 
1284 	return ret;
1285 }
1286 
1287 static int lpi2c_imx_init_recovery_info(struct lpi2c_imx_struct *lpi2c_imx,
1288 				  struct platform_device *pdev)
1289 {
1290 	struct i2c_bus_recovery_info *bri = &lpi2c_imx->rinfo;
1291 
1292 	bri->pinctrl = devm_pinctrl_get(&pdev->dev);
1293 	if (IS_ERR(bri->pinctrl))
1294 		return PTR_ERR(bri->pinctrl);
1295 
1296 	lpi2c_imx->adapter.bus_recovery_info = bri;
1297 
1298 	return 0;
1299 }
1300 
1301 static void dma_exit(struct device *dev, struct lpi2c_imx_dma *dma)
1302 {
1303 	if (dma->chan_rx)
1304 		dma_release_channel(dma->chan_rx);
1305 
1306 	if (dma->chan_tx)
1307 		dma_release_channel(dma->chan_tx);
1308 
1309 	devm_kfree(dev, dma);
1310 }
1311 
1312 static int lpi2c_dma_init(struct device *dev, dma_addr_t phy_addr)
1313 {
1314 	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
1315 	struct lpi2c_imx_dma *dma;
1316 	int ret;
1317 
1318 	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
1319 	if (!dma)
1320 		return -ENOMEM;
1321 
1322 	dma->phy_addr = phy_addr;
1323 
1324 	/* Prepare for TX DMA: */
1325 	dma->chan_tx = dma_request_chan(dev, "tx");
1326 	if (IS_ERR(dma->chan_tx)) {
1327 		ret = PTR_ERR(dma->chan_tx);
1328 		if (ret != -ENODEV && ret != -EPROBE_DEFER)
1329 			dev_err(dev, "can't request DMA tx channel (%d)\n", ret);
1330 		dma->chan_tx = NULL;
1331 		goto dma_exit;
1332 	}
1333 
1334 	/* Prepare for RX DMA: */
1335 	dma->chan_rx = dma_request_chan(dev, "rx");
1336 	if (IS_ERR(dma->chan_rx)) {
1337 		ret = PTR_ERR(dma->chan_rx);
1338 		if (ret != -ENODEV && ret != -EPROBE_DEFER)
1339 			dev_err(dev, "can't request DMA rx channel (%d)\n", ret);
1340 		dma->chan_rx = NULL;
1341 		goto dma_exit;
1342 	}
1343 
1344 	lpi2c_imx->can_use_dma = true;
1345 	lpi2c_imx->dma = dma;
1346 	return 0;
1347 
1348 dma_exit:
1349 	dma_exit(dev, dma);
1350 	return ret;
1351 }
1352 
1353 static u32 lpi2c_imx_func(struct i2c_adapter *adapter)
1354 {
1355 	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
1356 		I2C_FUNC_SMBUS_READ_BLOCK_DATA;
1357 }
1358 
1359 static const struct i2c_algorithm lpi2c_imx_algo = {
1360 	.xfer = lpi2c_imx_xfer,
1361 	.xfer_atomic = lpi2c_imx_xfer_atomic,
1362 	.functionality = lpi2c_imx_func,
1363 	.reg_target = lpi2c_imx_register_target,
1364 	.unreg_target = lpi2c_imx_unregister_target,
1365 };
1366 
1367 static const struct of_device_id lpi2c_imx_of_match[] = {
1368 	{ .compatible = "fsl,imx7ulp-lpi2c" },
1369 	{ }
1370 };
1371 MODULE_DEVICE_TABLE(of, lpi2c_imx_of_match);
1372 
1373 static int lpi2c_imx_probe(struct platform_device *pdev)
1374 {
1375 	struct lpi2c_imx_struct *lpi2c_imx;
1376 	struct resource *res;
1377 	dma_addr_t phy_addr;
1378 	unsigned int temp;
1379 	int irq, ret;
1380 
1381 	lpi2c_imx = devm_kzalloc(&pdev->dev, sizeof(*lpi2c_imx), GFP_KERNEL);
1382 	if (!lpi2c_imx)
1383 		return -ENOMEM;
1384 
1385 	lpi2c_imx->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
1386 	if (IS_ERR(lpi2c_imx->base))
1387 		return PTR_ERR(lpi2c_imx->base);
1388 
1389 	irq = platform_get_irq(pdev, 0);
1390 	if (irq < 0)
1391 		return irq;
1392 
1393 	lpi2c_imx->adapter.owner	= THIS_MODULE;
1394 	lpi2c_imx->adapter.algo		= &lpi2c_imx_algo;
1395 	lpi2c_imx->adapter.dev.parent	= &pdev->dev;
1396 	lpi2c_imx->adapter.dev.of_node	= pdev->dev.of_node;
1397 	strscpy(lpi2c_imx->adapter.name, pdev->name,
1398 		sizeof(lpi2c_imx->adapter.name));
1399 	phy_addr = (dma_addr_t)res->start;
1400 
1401 	ret = devm_clk_bulk_get_all(&pdev->dev, &lpi2c_imx->clks);
1402 	if (ret < 0)
1403 		return dev_err_probe(&pdev->dev, ret, "can't get I2C peripheral clock\n");
1404 	lpi2c_imx->num_clks = ret;
1405 
1406 	ret = of_property_read_u32(pdev->dev.of_node,
1407 				   "clock-frequency", &lpi2c_imx->bitrate);
1408 	if (ret)
1409 		lpi2c_imx->bitrate = I2C_MAX_STANDARD_MODE_FREQ;
1410 
1411 	ret = devm_request_irq(&pdev->dev, irq, lpi2c_imx_isr, IRQF_NO_SUSPEND,
1412 			       pdev->name, lpi2c_imx);
1413 	if (ret)
1414 		return dev_err_probe(&pdev->dev, ret, "can't claim irq %d\n", irq);
1415 
1416 	i2c_set_adapdata(&lpi2c_imx->adapter, lpi2c_imx);
1417 	platform_set_drvdata(pdev, lpi2c_imx);
1418 
1419 	ret = clk_bulk_prepare_enable(lpi2c_imx->num_clks, lpi2c_imx->clks);
1420 	if (ret)
1421 		return ret;
1422 
1423 	/*
1424 	 * Lock the parent clock rate to avoid getting parent clock upon
1425 	 * each transfer
1426 	 */
1427 	ret = devm_clk_rate_exclusive_get(&pdev->dev, lpi2c_imx->clks[0].clk);
1428 	if (ret)
1429 		return dev_err_probe(&pdev->dev, ret,
1430 				     "can't lock I2C peripheral clock rate\n");
1431 
1432 	lpi2c_imx->rate_per = clk_get_rate(lpi2c_imx->clks[0].clk);
1433 	if (!lpi2c_imx->rate_per)
1434 		return dev_err_probe(&pdev->dev, -EINVAL,
1435 				     "can't get I2C peripheral clock rate\n");
1436 
1437 	pm_runtime_set_autosuspend_delay(&pdev->dev, I2C_PM_TIMEOUT);
1438 	pm_runtime_use_autosuspend(&pdev->dev);
1439 	pm_runtime_get_noresume(&pdev->dev);
1440 	pm_runtime_set_active(&pdev->dev);
1441 	pm_runtime_enable(&pdev->dev);
1442 
1443 	temp = readl(lpi2c_imx->base + LPI2C_PARAM);
1444 	lpi2c_imx->txfifosize = 1 << (temp & 0x0f);
1445 	lpi2c_imx->rxfifosize = 1 << ((temp >> 8) & 0x0f);
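	/*
	 * For example (hypothetical register value): PARAM = 0x0404 would
	 * decode to 1 << 4 = 16-word TX and RX FIFOs.
	 */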
1446 
1447 	/* Init optional bus recovery function */
1448 	ret = lpi2c_imx_init_recovery_info(lpi2c_imx, pdev);
1449 	/* Give it another chance if the pinctrl in use is not ready yet */
1450 	if (ret == -EPROBE_DEFER)
1451 		goto rpm_disable;
1452 
1453 	/* Init DMA */
1454 	ret = lpi2c_dma_init(&pdev->dev, phy_addr);
1455 	if (ret) {
1456 		if (ret == -EPROBE_DEFER)
1457 			goto rpm_disable;
1458 		dev_info(&pdev->dev, "use pio mode\n");
1459 	}
1460 
1461 	ret = i2c_add_adapter(&lpi2c_imx->adapter);
1462 	if (ret)
1463 		goto rpm_disable;
1464 
1465 	pm_runtime_mark_last_busy(&pdev->dev);
1466 	pm_runtime_put_autosuspend(&pdev->dev);
1467 
1468 	dev_info(&lpi2c_imx->adapter.dev, "LPI2C adapter registered\n");
1469 
1470 	return 0;
1471 
1472 rpm_disable:
1473 	pm_runtime_dont_use_autosuspend(&pdev->dev);
1474 	pm_runtime_put_sync(&pdev->dev);
1475 	pm_runtime_disable(&pdev->dev);
1476 
1477 	return ret;
1478 }
1479 
1480 static void lpi2c_imx_remove(struct platform_device *pdev)
1481 {
1482 	struct lpi2c_imx_struct *lpi2c_imx = platform_get_drvdata(pdev);
1483 
1484 	i2c_del_adapter(&lpi2c_imx->adapter);
1485 
1486 	pm_runtime_disable(&pdev->dev);
1487 	pm_runtime_dont_use_autosuspend(&pdev->dev);
1488 }
1489 
1490 static int __maybe_unused lpi2c_runtime_suspend(struct device *dev)
1491 {
1492 	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
1493 
1494 	clk_bulk_disable(lpi2c_imx->num_clks, lpi2c_imx->clks);
1495 	pinctrl_pm_select_sleep_state(dev);
1496 
1497 	return 0;
1498 }
1499 
1500 static int __maybe_unused lpi2c_runtime_resume(struct device *dev)
1501 {
1502 	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
1503 	int ret;
1504 
1505 	pinctrl_pm_select_default_state(dev);
1506 	ret = clk_bulk_enable(lpi2c_imx->num_clks, lpi2c_imx->clks);
1507 	if (ret) {
1508 		dev_err(dev, "failed to enable I2C clock, ret=%d\n", ret);
1509 		return ret;
1510 	}
1511 
1512 	return 0;
1513 }
1514 
1515 static int __maybe_unused lpi2c_suspend_noirq(struct device *dev)
1516 {
1517 	return pm_runtime_force_suspend(dev);
1518 }
1519 
1520 static int __maybe_unused lpi2c_resume_noirq(struct device *dev)
1521 {
1522 	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
1523 	int ret;
1524 
1525 	ret = pm_runtime_force_resume(dev);
1526 	if (ret)
1527 		return ret;
1528 
1529 	/*
1530 	 * If the I2C module powers down during system suspend,
1531 	 * the register values will be lost. Therefore, reinitialize
1532 	 * the target when the system resumes.
1533 	 */
1534 	if (lpi2c_imx->target)
1535 		lpi2c_imx_target_init(lpi2c_imx);
1536 
1537 	return 0;
1538 }
1539 
1540 static int lpi2c_suspend(struct device *dev)
1541 {
1542 	/*
1543 	 * Some I2C devices may need the I2C controller to remain active
1544 	 * during resume_noirq() or suspend_noirq(). If the controller is
1545 	 * autosuspended, there is no way to wake it up once runtime PM is
1546 	 * disabled (in suspend_late()).
1547 	 *
1548 	 * During system resume, the I2C controller will be available only
1549 	 * after runtime PM is re-enabled (in resume_early()). However, this
1550 	 * may be too late for some devices.
1551 	 *
1552 	 * Wake up the controller in the suspend() callback while runtime PM
1553 	 * is still enabled. The I2C controller will remain available until
1554 	 * the suspend_noirq() callback (pm_runtime_force_suspend()) is
1555 	 * called. During resume, the I2C controller can be restored by the
1556 	 * resume_noirq() callback (pm_runtime_force_resume()).
1557 	 *
1558 	 * Finally, the resume() callback re-enables autosuspend, ensuring
1559 	 * the I2C controller remains available until the system enters
1560 	 * suspend_noirq(), and again from resume_noirq() onward.
1561 	 */
1562 	return pm_runtime_resume_and_get(dev);
1563 }
1564 
1565 static int lpi2c_resume(struct device *dev)
1566 {
1567 	pm_runtime_mark_last_busy(dev);
1568 	pm_runtime_put_autosuspend(dev);
1569 
1570 	return 0;
1571 }
1572 
1573 static const struct dev_pm_ops lpi2c_pm_ops = {
1574 	SET_NOIRQ_SYSTEM_SLEEP_PM_OPS(lpi2c_suspend_noirq,
1575 				      lpi2c_resume_noirq)
1576 	SYSTEM_SLEEP_PM_OPS(lpi2c_suspend, lpi2c_resume)
1577 	SET_RUNTIME_PM_OPS(lpi2c_runtime_suspend,
1578 			   lpi2c_runtime_resume, NULL)
1579 };
1580 
1581 static struct platform_driver lpi2c_imx_driver = {
1582 	.probe = lpi2c_imx_probe,
1583 	.remove = lpi2c_imx_remove,
1584 	.driver = {
1585 		.name = DRIVER_NAME,
1586 		.of_match_table = lpi2c_imx_of_match,
1587 		.pm = &lpi2c_pm_ops,
1588 	},
1589 };
1590 
1591 module_platform_driver(lpi2c_imx_driver);
1592 
1593 MODULE_AUTHOR("Gao Pan <pandy.gao@nxp.com>");
1594 MODULE_DESCRIPTION("I2C adapter driver for LPI2C bus");
1595 MODULE_LICENSE("GPL");
1596