1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * This is i.MX low power i2c controller driver.
4 *
5 * Copyright 2016 Freescale Semiconductor, Inc.
6 */
7
8 #include <linux/clk.h>
9 #include <linux/completion.h>
10 #include <linux/delay.h>
11 #include <linux/dma-mapping.h>
12 #include <linux/dmaengine.h>
13 #include <linux/err.h>
14 #include <linux/errno.h>
15 #include <linux/i2c.h>
16 #include <linux/init.h>
17 #include <linux/interrupt.h>
18 #include <linux/io.h>
19 #include <linux/iopoll.h>
20 #include <linux/kernel.h>
21 #include <linux/module.h>
22 #include <linux/of.h>
23 #include <linux/pinctrl/consumer.h>
24 #include <linux/platform_device.h>
25 #include <linux/pm_runtime.h>
26 #include <linux/sched.h>
27 #include <linux/slab.h>
28
#define DRIVER_NAME "imx-lpi2c"

/* Master-side register offsets */
#define LPI2C_PARAM	0x04	/* i2c RX/TX FIFO size */
#define LPI2C_MCR	0x10	/* i2c control register */
#define LPI2C_MSR	0x14	/* i2c status register */
#define LPI2C_MIER	0x18	/* i2c interrupt enable */
#define LPI2C_MDER	0x1C	/* i2c DMA enable */
#define LPI2C_MCFGR0	0x20	/* i2c master configuration */
#define LPI2C_MCFGR1	0x24	/* i2c master configuration */
#define LPI2C_MCFGR2	0x28	/* i2c master configuration */
#define LPI2C_MCFGR3	0x2C	/* i2c master configuration */
#define LPI2C_MCCR0	0x48	/* i2c master clk configuration */
#define LPI2C_MCCR1	0x50	/* i2c master clk configuration */
#define LPI2C_MFCR	0x58	/* i2c master FIFO control */
#define LPI2C_MFSR	0x5C	/* i2c master FIFO status */
#define LPI2C_MTDR	0x60	/* i2c master TX data register */
#define LPI2C_MRDR	0x70	/* i2c master RX data register */

/* Target-side register offsets */
#define LPI2C_SCR	0x110	/* i2c target control register */
#define LPI2C_SSR	0x114	/* i2c target status register */
#define LPI2C_SIER	0x118	/* i2c target interrupt enable */
#define LPI2C_SDER	0x11C	/* i2c target DMA enable */
#define LPI2C_SCFGR0	0x120	/* i2c target configuration */
#define LPI2C_SCFGR1	0x124	/* i2c target configuration */
#define LPI2C_SCFGR2	0x128	/* i2c target configuration */
#define LPI2C_SAMR	0x140	/* i2c target address match */
#define LPI2C_SASR	0x150	/* i2c target address status */
#define LPI2C_STAR	0x154	/* i2c target transmit ACK */
#define LPI2C_STDR	0x160	/* i2c target transmit data */
#define LPI2C_SRDR	0x170	/* i2c target receive data */
#define LPI2C_SRDROR	0x178	/* i2c target receive data read only */

/* i2c command (written alongside the data byte into MTDR) */
#define TRAN_DATA	0X00
#define RECV_DATA	0X01
#define GEN_STOP	0X02
#define RECV_DISCARD	0X03
#define GEN_START	0X04
#define START_NACK	0X05
#define START_HIGH	0X06
#define START_HIGH_NACK	0X07

/* Master control / status / interrupt-enable bit fields */
#define MCR_MEN		BIT(0)
#define MCR_RST		BIT(1)
#define MCR_DOZEN	BIT(2)
#define MCR_DBGEN	BIT(3)
#define MCR_RTF	BIT(8)
#define MCR_RRF	BIT(9)
#define MSR_TDF	BIT(0)
#define MSR_RDF	BIT(1)
#define MSR_SDF	BIT(9)
#define MSR_NDF	BIT(10)
#define MSR_ALF	BIT(11)
#define MSR_MBF	BIT(24)
#define MSR_BBF	BIT(25)
#define MIER_TDIE	BIT(0)
#define MIER_RDIE	BIT(1)
#define MIER_SDIE	BIT(9)
#define MIER_NDIE	BIT(10)
#define MCFGR1_AUTOSTOP	BIT(8)
#define MCFGR1_IGNACK	BIT(9)
#define MRDR_RXEMPTY	BIT(14)
#define MDER_TDDE	BIT(0)
#define MDER_RDDE	BIT(1)

/* Target control / status / interrupt-enable bit fields */
#define SCR_SEN	BIT(0)
#define SCR_RST	BIT(1)
#define SCR_FILTEN	BIT(4)
#define SCR_RTF	BIT(8)
#define SCR_RRF	BIT(9)
#define SSR_TDF	BIT(0)
#define SSR_RDF	BIT(1)
#define SSR_AVF	BIT(2)
#define SSR_TAF	BIT(3)
#define SSR_RSF	BIT(8)
#define SSR_SDF	BIT(9)
#define SSR_BEF	BIT(10)
#define SSR_FEF	BIT(11)
#define SSR_SBF	BIT(24)
#define SSR_BBF	BIT(25)
#define SSR_CLEAR_BITS	(SSR_RSF | SSR_SDF | SSR_BEF | SSR_FEF)
#define SIER_TDIE	BIT(0)
#define SIER_RDIE	BIT(1)
#define SIER_AVIE	BIT(2)
#define SIER_TAIE	BIT(3)
#define SIER_RSIE	BIT(8)
#define SIER_SDIE	BIT(9)
#define SIER_BEIE	BIT(10)
#define SIER_FEIE	BIT(11)
#define SIER_AM0F	BIT(12)
#define SCFGR1_RXSTALL	BIT(1)
#define SCFGR1_TXDSTALL	BIT(2)
#define SCFGR2_FILTSDA_SHIFT	24
#define SCFGR2_FILTSCL_SHIFT	16
#define SCFGR2_CLKHOLD(x)	(x)
#define SCFGR2_FILTSDA(x)	((x) << SCFGR2_FILTSDA_SHIFT)
#define SCFGR2_FILTSCL(x)	((x) << SCFGR2_FILTSCL_SHIFT)
#define SASR_READ_REQ	0x1
#define SLAVE_INT_FLAG	(SIER_TDIE | SIER_RDIE | SIER_AVIE | \
			 SIER_SDIE | SIER_BEIE)

#define I2C_CLK_RATIO	2	/* CLKLO : CLKHI ratio, see lpi2c_imx_config() */
#define CHUNK_DATA	256	/* max bytes one RECV_DATA command can fetch */

#define I2C_PM_TIMEOUT		10 /* ms */
#define I2C_DMA_THRESHOLD	8 /* bytes */
135
/* Bus speed class, derived from the requested bitrate in lpi2c_imx_set_mode() */
enum lpi2c_imx_mode {
	STANDARD,	/* 100+Kbps */
	FAST,		/* 400+Kbps */
	FAST_PLUS,	/* 1.0+Mbps */
	HS,		/* 3.4+Mbps */
	ULTRA_FAST,	/* 5.0+Mbps */
};
143
/*
 * SDA/SCL pin configuration, written into MCFGR1 (bits 24+) by
 * lpi2c_imx_config(); TWO_PIN_OO is selected for ULTRA_FAST mode.
 */
enum lpi2c_imx_pincfg {
	TWO_PIN_OD,
	TWO_PIN_OO,
	TWO_PIN_PP,
	FOUR_PIN_PP,
};
150
/* Per-transfer DMA state (data channel, plus an RX-command stream for reads) */
struct lpi2c_imx_dma {
	bool using_pio_mode;	/* true until DMA actually starts; allows PIO fallback */
	/*
	 * NOTE(review): rx_cmd_buf_len holds cmd_num * sizeof(u16); for very
	 * long reads (> 127 chunks) this would overflow a u8 — confirm the
	 * maximum supported msg length.
	 */
	u8 rx_cmd_buf_len;
	u8 *dma_buf;		/* DMA-safe copy of the i2c message buffer */
	u16 *rx_cmd_buf;	/* RECV_DATA command words streamed into MTDR */
	unsigned int dma_len;	/* message length in bytes */
	unsigned int tx_burst_num;	/* TX channel maxburst */
	unsigned int rx_burst_num;	/* RX channel maxburst */
	unsigned long dma_msg_flag;	/* msg->flags (I2C_M_RD selects direction) */
	resource_size_t phy_addr;	/* physical base of the controller registers */
	dma_addr_t dma_tx_addr;	/* mapping of rx_cmd_buf */
	dma_addr_t dma_addr;	/* mapping of dma_buf */
	enum dma_data_direction dma_data_dir;
	enum dma_transfer_direction dma_transfer_dir;
	struct dma_chan *chan_tx;
	struct dma_chan *chan_rx;
};
168
/* Driver instance state, one per LPI2C controller */
struct lpi2c_imx_struct {
	struct i2c_adapter adapter;
	int num_clks;
	struct clk_bulk_data *clks;
	void __iomem *base;		/* mapped register base */
	__u8 *rx_buf;			/* current RX message buffer (PIO path) */
	__u8 *tx_buf;			/* current TX message buffer (PIO path) */
	struct completion complete;	/* signalled by irq/DMA on msg completion */
	unsigned long rate_per;		/* functional clock rate in Hz */
	unsigned int msglen;		/* total bytes in the current message */
	unsigned int delivered;		/* bytes transferred so far */
	unsigned int block_data;	/* non-zero for SMBus block reads (I2C_M_RECV_LEN) */
	unsigned int bitrate;		/* requested bus speed in bps */
	unsigned int txfifosize;
	unsigned int rxfifosize;
	enum lpi2c_imx_mode mode;
	struct i2c_bus_recovery_info rinfo;
	bool can_use_dma;
	struct lpi2c_imx_dma *dma;
	struct i2c_client *target;
};
190
/*
 * Poll MSR until @cond holds or 500ms elapse; busy-waits when @atomic is
 * set, sleeps otherwise. NOTE: expects a local `lpi2c_imx` in caller scope.
 */
#define lpi2c_imx_read_msr_poll_timeout(atomic, val, cond) \
	(atomic ? readl_poll_timeout_atomic(lpi2c_imx->base + LPI2C_MSR, val, \
					    cond, 0, 500000) : \
		  readl_poll_timeout(lpi2c_imx->base + LPI2C_MSR, val, cond, \
				     0, 500000))
196
/* Program the master interrupt enable register (MIER) with @enable mask. */
static void lpi2c_imx_intctrl(struct lpi2c_imx_struct *lpi2c_imx,
			      unsigned int enable)
{
	writel(enable, lpi2c_imx->base + LPI2C_MIER);
}
202
/*
 * After queueing a START, wait for the controller to own the bus.
 * Returns 0 on success, -EAGAIN on arbitration loss (cleared here) or
 * -ETIMEDOUT when the bus never became busy (recovery is attempted).
 */
static int lpi2c_imx_bus_busy(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
{
	unsigned int status;
	int ret;

	ret = lpi2c_imx_read_msr_poll_timeout(atomic, status,
					      status & (MSR_ALF | MSR_BBF | MSR_MBF));

	/* Arbitration lost: clear the flag (write-1-to-clear) and retry */
	if (status & MSR_ALF) {
		writel(status, lpi2c_imx->base + LPI2C_MSR);
		return -EAGAIN;
	}

	/* Timed out waiting for busy/master-busy: try to recover the bus */
	if (ret) {
		dev_dbg(&lpi2c_imx->adapter.dev, "bus not work\n");
		if (lpi2c_imx->adapter.bus_recovery_info)
			i2c_recover_bus(&lpi2c_imx->adapter);
		return -ETIMEDOUT;
	}

	return 0;
}
227
lpi2c_imx_txfifo_cnt(struct lpi2c_imx_struct * lpi2c_imx)228 static u32 lpi2c_imx_txfifo_cnt(struct lpi2c_imx_struct *lpi2c_imx)
229 {
230 return readl(lpi2c_imx->base + LPI2C_MFSR) & 0xff;
231 }
232
lpi2c_imx_set_mode(struct lpi2c_imx_struct * lpi2c_imx)233 static void lpi2c_imx_set_mode(struct lpi2c_imx_struct *lpi2c_imx)
234 {
235 unsigned int bitrate = lpi2c_imx->bitrate;
236 enum lpi2c_imx_mode mode;
237
238 if (bitrate < I2C_MAX_FAST_MODE_FREQ)
239 mode = STANDARD;
240 else if (bitrate < I2C_MAX_FAST_MODE_PLUS_FREQ)
241 mode = FAST;
242 else if (bitrate < I2C_MAX_HIGH_SPEED_MODE_FREQ)
243 mode = FAST_PLUS;
244 else if (bitrate < I2C_MAX_ULTRA_FAST_MODE_FREQ)
245 mode = HS;
246 else
247 mode = ULTRA_FAST;
248
249 lpi2c_imx->mode = mode;
250 }
251
/*
 * Issue a (repeated) START followed by the 8-bit target address, then
 * wait until the controller reports the bus busy.
 */
static int lpi2c_imx_start(struct lpi2c_imx_struct *lpi2c_imx,
			   struct i2c_msg *msgs, bool atomic)
{
	unsigned int val;

	/* Flush both FIFOs and clear all write-1-to-clear status flags */
	val = readl(lpi2c_imx->base + LPI2C_MCR);
	writel(val | MCR_RRF | MCR_RTF, lpi2c_imx->base + LPI2C_MCR);
	writel(0x7f00, lpi2c_imx->base + LPI2C_MSR);

	/* Queue the START command with the address byte in the data field */
	val = i2c_8bit_addr_from_msg(msgs) | (GEN_START << 8);
	writel(val, lpi2c_imx->base + LPI2C_MTDR);

	return lpi2c_imx_bus_busy(lpi2c_imx, atomic);
}
267
/* Queue a STOP and wait for the STOP-detected flag; recover on timeout. */
static void lpi2c_imx_stop(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
{
	unsigned int status;
	int ret;

	writel(GEN_STOP << 8, lpi2c_imx->base + LPI2C_MTDR);

	ret = lpi2c_imx_read_msr_poll_timeout(atomic, status, status & MSR_SDF);
	if (!ret)
		return;

	dev_dbg(&lpi2c_imx->adapter.dev, "stop timeout\n");
	if (lpi2c_imx->adapter.bus_recovery_info)
		i2c_recover_bus(&lpi2c_imx->adapter);
}
283
/* CLKLO = I2C_CLK_RATIO * CLKHI, SETHOLD = CLKHI, DATAVD = CLKHI/2 */
static int lpi2c_imx_config(struct lpi2c_imx_struct *lpi2c_imx)
{
	u8 prescale, filt, sethold, datavd;
	unsigned int clk_rate, clk_cycle, clkhi, clklo;
	enum lpi2c_imx_pincfg pincfg;
	unsigned int temp;

	lpi2c_imx_set_mode(lpi2c_imx);

	clk_rate = lpi2c_imx->rate_per;

	/* No glitch filter in HS/ultra-fast modes, a 2-cycle filter otherwise */
	if (lpi2c_imx->mode == HS || lpi2c_imx->mode == ULTRA_FAST)
		filt = 0;
	else
		filt = 2;

	/*
	 * Find the smallest prescaler (0..7, i.e. divide-by 1..128) whose
	 * computed low period stays below 64 functional-clock cycles.
	 */
	for (prescale = 0; prescale <= 7; prescale++) {
		clk_cycle = clk_rate / ((1 << prescale) * lpi2c_imx->bitrate)
			    - 3 - (filt >> 1);
		clkhi = DIV_ROUND_UP(clk_cycle, I2C_CLK_RATIO + 1);
		clklo = clk_cycle - clkhi;
		if (clklo < 64)
			break;
	}

	/* Even the largest prescaler cannot reach the requested bitrate */
	if (prescale > 7)
		return -EINVAL;

	/* set MCFGR1: PINCFG, PRESCALE, IGNACK */
	if (lpi2c_imx->mode == ULTRA_FAST)
		pincfg = TWO_PIN_OO;
	else
		pincfg = TWO_PIN_OD;
	temp = prescale | pincfg << 24;

	/* Ultra-fast mode is write-only on the bus: ignore NACKs */
	if (lpi2c_imx->mode == ULTRA_FAST)
		temp |= MCFGR1_IGNACK;

	writel(temp, lpi2c_imx->base + LPI2C_MCFGR1);

	/* set MCFGR2: FILTSDA, FILTSCL */
	temp = (filt << 16) | (filt << 24);
	writel(temp, lpi2c_imx->base + LPI2C_MCFGR2);

	/* set MCCR: DATAVD, SETHOLD, CLKHI, CLKLO */
	sethold = clkhi;
	datavd = clkhi >> 1;
	temp = datavd << 24 | sethold << 16 | clkhi << 8 | clklo;

	/* MCCR1 drives high-speed transfers, MCCR0 everything else */
	if (lpi2c_imx->mode == HS)
		writel(temp, lpi2c_imx->base + LPI2C_MCCR1);
	else
		writel(temp, lpi2c_imx->base + LPI2C_MCCR0);

	return 0;
}
341
lpi2c_imx_master_enable(struct lpi2c_imx_struct * lpi2c_imx)342 static int lpi2c_imx_master_enable(struct lpi2c_imx_struct *lpi2c_imx)
343 {
344 unsigned int temp;
345 int ret;
346
347 ret = pm_runtime_resume_and_get(lpi2c_imx->adapter.dev.parent);
348 if (ret < 0)
349 return ret;
350
351 temp = MCR_RST;
352 writel(temp, lpi2c_imx->base + LPI2C_MCR);
353 writel(0, lpi2c_imx->base + LPI2C_MCR);
354
355 ret = lpi2c_imx_config(lpi2c_imx);
356 if (ret)
357 goto rpm_put;
358
359 temp = readl(lpi2c_imx->base + LPI2C_MCR);
360 temp |= MCR_MEN;
361 writel(temp, lpi2c_imx->base + LPI2C_MCR);
362
363 return 0;
364
365 rpm_put:
366 pm_runtime_put_autosuspend(lpi2c_imx->adapter.dev.parent);
367
368 return ret;
369 }
370
lpi2c_imx_master_disable(struct lpi2c_imx_struct * lpi2c_imx)371 static int lpi2c_imx_master_disable(struct lpi2c_imx_struct *lpi2c_imx)
372 {
373 u32 temp;
374
375 temp = readl(lpi2c_imx->base + LPI2C_MCR);
376 temp &= ~MCR_MEN;
377 writel(temp, lpi2c_imx->base + LPI2C_MCR);
378
379 pm_runtime_put_autosuspend(lpi2c_imx->adapter.dev.parent);
380
381 return 0;
382 }
383
lpi2c_imx_pio_msg_complete(struct lpi2c_imx_struct * lpi2c_imx)384 static int lpi2c_imx_pio_msg_complete(struct lpi2c_imx_struct *lpi2c_imx)
385 {
386 unsigned long time_left;
387
388 time_left = wait_for_completion_timeout(&lpi2c_imx->complete, HZ);
389
390 return time_left ? 0 : -ETIMEDOUT;
391 }
392
/*
 * Wait for the TX FIFO to drain completely (or a NACK to be flagged).
 * Returns 0, -EIO on NACK, or -ETIMEDOUT (after attempting bus recovery).
 */
static int lpi2c_imx_txfifo_empty(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
{
	unsigned int status;
	int ret;

	ret = lpi2c_imx_read_msr_poll_timeout(atomic, status,
					      (status & MSR_NDF) ||
					      !lpi2c_imx_txfifo_cnt(lpi2c_imx));

	/* A NACK takes precedence over a timeout */
	if (status & MSR_NDF) {
		dev_dbg(&lpi2c_imx->adapter.dev, "NDF detected\n");
		return -EIO;
	}

	if (ret) {
		dev_dbg(&lpi2c_imx->adapter.dev, "txfifo empty timeout\n");
		if (lpi2c_imx->adapter.bus_recovery_info)
			i2c_recover_bus(&lpi2c_imx->adapter);
		return -ETIMEDOUT;
	}

	return 0;
}
415
/* Set the TX watermark to half the FIFO depth (MFCR low half-word). */
static void lpi2c_imx_set_tx_watermark(struct lpi2c_imx_struct *lpi2c_imx)
{
	writel(lpi2c_imx->txfifosize >> 1, lpi2c_imx->base + LPI2C_MFCR);
}
420
lpi2c_imx_set_rx_watermark(struct lpi2c_imx_struct * lpi2c_imx)421 static void lpi2c_imx_set_rx_watermark(struct lpi2c_imx_struct *lpi2c_imx)
422 {
423 unsigned int temp, remaining;
424
425 remaining = lpi2c_imx->msglen - lpi2c_imx->delivered;
426
427 if (remaining > (lpi2c_imx->rxfifosize >> 1))
428 temp = lpi2c_imx->rxfifosize >> 1;
429 else
430 temp = 0;
431
432 writel(temp << 16, lpi2c_imx->base + LPI2C_MFCR);
433 }
434
/*
 * Top up the TX FIFO from the current message buffer.
 * Returns true once the whole message has been queued (and, in irq mode,
 * completes the transfer); otherwise re-arms the TX/NACK interrupts in
 * irq mode and returns false.
 */
static bool lpi2c_imx_write_txfifo(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
{
	unsigned int txcnt = readl(lpi2c_imx->base + LPI2C_MFSR) & 0xff;

	while (txcnt < lpi2c_imx->txfifosize &&
	       lpi2c_imx->delivered < lpi2c_imx->msglen) {
		writel(lpi2c_imx->tx_buf[lpi2c_imx->delivered++],
		       lpi2c_imx->base + LPI2C_MTDR);
		txcnt++;
	}

	if (lpi2c_imx->delivered < lpi2c_imx->msglen) {
		if (!atomic)
			lpi2c_imx_intctrl(lpi2c_imx, MIER_TDIE | MIER_NDIE);
		return false;
	}

	if (!atomic)
		complete(&lpi2c_imx->complete);

	return true;
}
461
/*
 * Drain the RX FIFO into the message buffer and, when more data is
 * expected, queue the next receive command. Returns true when the whole
 * message has been received (completing the transfer in irq mode).
 */
static bool lpi2c_imx_read_rxfifo(struct lpi2c_imx_struct *lpi2c_imx, bool atomic)
{
	unsigned int blocklen, remaining;
	unsigned int temp, data;

	/* MRDR sets the RXEMPTY flag once the FIFO has been drained */
	do {
		data = readl(lpi2c_imx->base + LPI2C_MRDR);
		if (data & MRDR_RXEMPTY)
			break;

		lpi2c_imx->rx_buf[lpi2c_imx->delivered++] = data & 0xff;
	} while (1);

	/*
	 * First byte is the length of remaining packet in the SMBus block
	 * data read. Add it to msgs->len.
	 */
	if (lpi2c_imx->block_data) {
		blocklen = lpi2c_imx->rx_buf[0];
		lpi2c_imx->msglen += blocklen;
	}

	remaining = lpi2c_imx->msglen - lpi2c_imx->delivered;

	/* Message fully received: wake the waiter (irq mode only) */
	if (!remaining) {
		if (!atomic)
			complete(&lpi2c_imx->complete);
		return true;
	}

	/* not finished, still waiting for rx data */
	lpi2c_imx_set_rx_watermark(lpi2c_imx);

	/* multiple receive commands */
	if (lpi2c_imx->block_data) {
		/* Block read: now that the length is known, request the rest */
		lpi2c_imx->block_data = 0;
		temp = remaining;
		temp |= (RECV_DATA << 8);
		writel(temp, lpi2c_imx->base + LPI2C_MTDR);
	} else if (!(lpi2c_imx->delivered & 0xff)) {
		/* Chunk boundary reached: queue the next receive command */
		temp = (remaining > CHUNK_DATA ? CHUNK_DATA : remaining) - 1;
		temp |= (RECV_DATA << 8);
		writel(temp, lpi2c_imx->base + LPI2C_MTDR);
	}

	/* Re-arm the RX-ready interrupt for the remaining bytes */
	if (!atomic)
		lpi2c_imx_intctrl(lpi2c_imx, MIER_RDIE);

	return false;
}
512
/*
 * Start an interrupt-driven write: prime the TX FIFO once; the irq
 * handler keeps topping it up via lpi2c_imx_write_txfifo().
 */
static void lpi2c_imx_write(struct lpi2c_imx_struct *lpi2c_imx,
			    struct i2c_msg *msgs)
{
	lpi2c_imx->tx_buf = msgs->buf;
	lpi2c_imx_set_tx_watermark(lpi2c_imx);
	lpi2c_imx_write_txfifo(lpi2c_imx, false);
}
520
/*
 * Atomic (no-irq, no-sleep) write path.
 *
 * lpi2c_imx_write_txfifo() is evaluated inside the poll condition, so
 * each poll iteration refills the TX FIFO until the whole message has
 * been queued, a NACK (MSR_NDF) is raised, or the poll times out.
 */
static int lpi2c_imx_write_atomic(struct lpi2c_imx_struct *lpi2c_imx,
				  struct i2c_msg *msgs)
{
	u32 temp;
	int err;

	lpi2c_imx->tx_buf = msgs->buf;

	err = lpi2c_imx_read_msr_poll_timeout(true, temp,
					      (temp & MSR_NDF) ||
					      lpi2c_imx_write_txfifo(lpi2c_imx, true));

	/* A NACK takes precedence over the poll result */
	if (temp & MSR_NDF)
		return -EIO;

	return err;
}
538
lpi2c_imx_read_init(struct lpi2c_imx_struct * lpi2c_imx,struct i2c_msg * msgs)539 static void lpi2c_imx_read_init(struct lpi2c_imx_struct *lpi2c_imx,
540 struct i2c_msg *msgs)
541 {
542 unsigned int temp;
543
544 lpi2c_imx->rx_buf = msgs->buf;
545 lpi2c_imx->block_data = msgs->flags & I2C_M_RECV_LEN;
546
547 lpi2c_imx_set_rx_watermark(lpi2c_imx);
548 temp = msgs->len > CHUNK_DATA ? CHUNK_DATA - 1 : msgs->len - 1;
549 temp |= (RECV_DATA << 8);
550 writel(temp, lpi2c_imx->base + LPI2C_MTDR);
551 }
552
lpi2c_imx_read_chunk_atomic(struct lpi2c_imx_struct * lpi2c_imx)553 static bool lpi2c_imx_read_chunk_atomic(struct lpi2c_imx_struct *lpi2c_imx)
554 {
555 u32 rxcnt;
556
557 rxcnt = (readl(lpi2c_imx->base + LPI2C_MFSR) >> 16) & 0xFF;
558 if (!rxcnt)
559 return false;
560
561 if (!lpi2c_imx_read_rxfifo(lpi2c_imx, true))
562 return false;
563
564 return true;
565 }
566
lpi2c_imx_read_atomic(struct lpi2c_imx_struct * lpi2c_imx,struct i2c_msg * msgs)567 static int lpi2c_imx_read_atomic(struct lpi2c_imx_struct *lpi2c_imx,
568 struct i2c_msg *msgs)
569 {
570 u32 temp;
571 int tmo_us;
572
573 tmo_us = 1000000;
574 do {
575 if (lpi2c_imx_read_chunk_atomic(lpi2c_imx))
576 return 0;
577
578 temp = readl(lpi2c_imx->base + LPI2C_MSR);
579
580 if (temp & MSR_NDF)
581 return -EIO;
582
583 udelay(100);
584 tmo_us -= 100;
585 } while (tmo_us > 0);
586
587 return -ETIMEDOUT;
588 }
589
is_use_dma(struct lpi2c_imx_struct * lpi2c_imx,struct i2c_msg * msg)590 static bool is_use_dma(struct lpi2c_imx_struct *lpi2c_imx, struct i2c_msg *msg)
591 {
592 if (!lpi2c_imx->can_use_dma)
593 return false;
594
595 /*
596 * When the length of data is less than I2C_DMA_THRESHOLD,
597 * cpu mode is used directly to avoid low performance.
598 */
599 return !(msg->len < I2C_DMA_THRESHOLD);
600 }
601
/*
 * Interrupt-driven transfer of one message: kick off the read or write
 * and sleep until the irq handler signals completion (or timeout).
 */
static int lpi2c_imx_pio_xfer(struct lpi2c_imx_struct *lpi2c_imx,
			      struct i2c_msg *msg)
{
	reinit_completion(&lpi2c_imx->complete);

	if (msg->flags & I2C_M_RD) {
		/* Queue the first receive command and arm RX/NACK irqs */
		lpi2c_imx_read_init(lpi2c_imx, msg);
		lpi2c_imx_intctrl(lpi2c_imx, MIER_RDIE | MIER_NDIE);
	} else {
		/* The write path arms its own irqs as the FIFO drains */
		lpi2c_imx_write(lpi2c_imx, msg);
	}

	return lpi2c_imx_pio_msg_complete(lpi2c_imx);
}
616
/* Atomic (polling, no interrupts) transfer of one message. */
static int lpi2c_imx_pio_xfer_atomic(struct lpi2c_imx_struct *lpi2c_imx,
				     struct i2c_msg *msg)
{
	if (msg->flags & I2C_M_RD) {
		lpi2c_imx_read_init(lpi2c_imx, msg);
		return lpi2c_imx_read_atomic(lpi2c_imx, msg);
	}

	return lpi2c_imx_write_atomic(lpi2c_imx, msg);
}
627
lpi2c_imx_dma_timeout_calculate(struct lpi2c_imx_struct * lpi2c_imx)628 static int lpi2c_imx_dma_timeout_calculate(struct lpi2c_imx_struct *lpi2c_imx)
629 {
630 unsigned long time = 0;
631
632 time = 8 * lpi2c_imx->dma->dma_len * 1000 / lpi2c_imx->bitrate;
633
634 /* Add extra second for scheduler related activities */
635 time += 1;
636
637 /* Double calculated time */
638 return secs_to_jiffies(time);
639 }
640
lpi2c_imx_alloc_rx_cmd_buf(struct lpi2c_imx_struct * lpi2c_imx)641 static int lpi2c_imx_alloc_rx_cmd_buf(struct lpi2c_imx_struct *lpi2c_imx)
642 {
643 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
644 u16 rx_remain = dma->dma_len;
645 int cmd_num;
646 u16 temp;
647
648 /*
649 * Calculate the number of rx command words via the DMA TX channel
650 * writing into command register based on the i2c msg len, and build
651 * the rx command words buffer.
652 */
653 cmd_num = DIV_ROUND_UP(rx_remain, CHUNK_DATA);
654 dma->rx_cmd_buf = kcalloc(cmd_num, sizeof(u16), GFP_KERNEL);
655 dma->rx_cmd_buf_len = cmd_num * sizeof(u16);
656
657 if (!dma->rx_cmd_buf) {
658 dev_err(&lpi2c_imx->adapter.dev, "Alloc RX cmd buffer failed\n");
659 return -ENOMEM;
660 }
661
662 for (int i = 0; i < cmd_num ; i++) {
663 temp = rx_remain > CHUNK_DATA ? CHUNK_DATA - 1 : rx_remain - 1;
664 temp |= (RECV_DATA << 8);
665 rx_remain -= CHUNK_DATA;
666 dma->rx_cmd_buf[i] = temp;
667 }
668
669 return 0;
670 }
671
lpi2c_imx_dma_msg_complete(struct lpi2c_imx_struct * lpi2c_imx)672 static int lpi2c_imx_dma_msg_complete(struct lpi2c_imx_struct *lpi2c_imx)
673 {
674 unsigned long time_left, time;
675
676 time = lpi2c_imx_dma_timeout_calculate(lpi2c_imx);
677 time_left = wait_for_completion_timeout(&lpi2c_imx->complete, time);
678 if (time_left == 0) {
679 dev_err(&lpi2c_imx->adapter.dev, "I/O Error in DMA Data Transfer\n");
680 return -ETIMEDOUT;
681 }
682
683 return 0;
684 }
685
lpi2c_dma_unmap(struct lpi2c_imx_dma * dma)686 static void lpi2c_dma_unmap(struct lpi2c_imx_dma *dma)
687 {
688 struct dma_chan *chan = dma->dma_data_dir == DMA_FROM_DEVICE
689 ? dma->chan_rx : dma->chan_tx;
690
691 dma_unmap_single(chan->device->dev, dma->dma_addr,
692 dma->dma_len, dma->dma_data_dir);
693
694 dma->dma_data_dir = DMA_NONE;
695 }
696
/* Abort the RX-command stream on the TX channel, then release its mapping. */
static void lpi2c_cleanup_rx_cmd_dma(struct lpi2c_imx_dma *dma)
{
	dmaengine_terminate_sync(dma->chan_tx);
	dma_unmap_single(dma->chan_tx->device->dev, dma->dma_tx_addr,
			 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
}
703
lpi2c_cleanup_dma(struct lpi2c_imx_dma * dma)704 static void lpi2c_cleanup_dma(struct lpi2c_imx_dma *dma)
705 {
706 if (dma->dma_data_dir == DMA_FROM_DEVICE)
707 dmaengine_terminate_sync(dma->chan_rx);
708 else if (dma->dma_data_dir == DMA_TO_DEVICE)
709 dmaengine_terminate_sync(dma->chan_tx);
710
711 lpi2c_dma_unmap(dma);
712 }
713
lpi2c_dma_callback(void * data)714 static void lpi2c_dma_callback(void *data)
715 {
716 struct lpi2c_imx_struct *lpi2c_imx = (struct lpi2c_imx_struct *)data;
717
718 complete(&lpi2c_imx->complete);
719 }
720
lpi2c_dma_rx_cmd_submit(struct lpi2c_imx_struct * lpi2c_imx)721 static int lpi2c_dma_rx_cmd_submit(struct lpi2c_imx_struct *lpi2c_imx)
722 {
723 struct dma_async_tx_descriptor *rx_cmd_desc;
724 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
725 struct dma_chan *txchan = dma->chan_tx;
726 dma_cookie_t cookie;
727
728 dma->dma_tx_addr = dma_map_single(txchan->device->dev,
729 dma->rx_cmd_buf, dma->rx_cmd_buf_len,
730 DMA_TO_DEVICE);
731 if (dma_mapping_error(txchan->device->dev, dma->dma_tx_addr)) {
732 dev_err(&lpi2c_imx->adapter.dev, "DMA map failed, use pio\n");
733 return -EINVAL;
734 }
735
736 rx_cmd_desc = dmaengine_prep_slave_single(txchan, dma->dma_tx_addr,
737 dma->rx_cmd_buf_len, DMA_MEM_TO_DEV,
738 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
739 if (!rx_cmd_desc) {
740 dev_err(&lpi2c_imx->adapter.dev, "DMA prep slave sg failed, use pio\n");
741 goto desc_prepare_err_exit;
742 }
743
744 cookie = dmaengine_submit(rx_cmd_desc);
745 if (dma_submit_error(cookie)) {
746 dev_err(&lpi2c_imx->adapter.dev, "submitting DMA failed, use pio\n");
747 goto submit_err_exit;
748 }
749
750 dma_async_issue_pending(txchan);
751
752 return 0;
753
754 desc_prepare_err_exit:
755 dma_unmap_single(txchan->device->dev, dma->dma_tx_addr,
756 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
757 return -EINVAL;
758
759 submit_err_exit:
760 dma_unmap_single(txchan->device->dev, dma->dma_tx_addr,
761 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
762 dmaengine_desc_free(rx_cmd_desc);
763 return -EINVAL;
764 }
765
lpi2c_dma_submit(struct lpi2c_imx_struct * lpi2c_imx)766 static int lpi2c_dma_submit(struct lpi2c_imx_struct *lpi2c_imx)
767 {
768 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
769 struct dma_async_tx_descriptor *desc;
770 struct dma_chan *chan;
771 dma_cookie_t cookie;
772
773 if (dma->dma_msg_flag & I2C_M_RD) {
774 chan = dma->chan_rx;
775 dma->dma_data_dir = DMA_FROM_DEVICE;
776 dma->dma_transfer_dir = DMA_DEV_TO_MEM;
777 } else {
778 chan = dma->chan_tx;
779 dma->dma_data_dir = DMA_TO_DEVICE;
780 dma->dma_transfer_dir = DMA_MEM_TO_DEV;
781 }
782
783 dma->dma_addr = dma_map_single(chan->device->dev,
784 dma->dma_buf, dma->dma_len, dma->dma_data_dir);
785 if (dma_mapping_error(chan->device->dev, dma->dma_addr)) {
786 dev_err(&lpi2c_imx->adapter.dev, "DMA map failed, use pio\n");
787 return -EINVAL;
788 }
789
790 desc = dmaengine_prep_slave_single(chan, dma->dma_addr,
791 dma->dma_len, dma->dma_transfer_dir,
792 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
793 if (!desc) {
794 dev_err(&lpi2c_imx->adapter.dev, "DMA prep slave sg failed, use pio\n");
795 goto desc_prepare_err_exit;
796 }
797
798 reinit_completion(&lpi2c_imx->complete);
799 desc->callback = lpi2c_dma_callback;
800 desc->callback_param = lpi2c_imx;
801
802 cookie = dmaengine_submit(desc);
803 if (dma_submit_error(cookie)) {
804 dev_err(&lpi2c_imx->adapter.dev, "submitting DMA failed, use pio\n");
805 goto submit_err_exit;
806 }
807
808 /* Can't switch to PIO mode when DMA have started transfer */
809 dma->using_pio_mode = false;
810
811 dma_async_issue_pending(chan);
812
813 return 0;
814
815 desc_prepare_err_exit:
816 lpi2c_dma_unmap(dma);
817 return -EINVAL;
818
819 submit_err_exit:
820 lpi2c_dma_unmap(dma);
821 dmaengine_desc_free(desc);
822 return -EINVAL;
823 }
824
/*
 * Largest burst size that fits in half the FIFO and divides the transfer
 * length evenly (so the final burst is never partial). Returns 0 when
 * fifosize < 2, i.e. no usable burst exists.
 */
static int lpi2c_imx_find_max_burst_num(unsigned int fifosize, unsigned int len)
{
	unsigned int burst;

	for (burst = fifosize / 2; burst > 0; burst--) {
		if (len % burst == 0)
			break;
	}

	return burst;
}
835
836 /*
837 * For a highest DMA efficiency, tx/rx burst number should be calculated according
838 * to the FIFO depth.
839 */
lpi2c_imx_dma_burst_num_calculate(struct lpi2c_imx_struct * lpi2c_imx)840 static void lpi2c_imx_dma_burst_num_calculate(struct lpi2c_imx_struct *lpi2c_imx)
841 {
842 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
843 unsigned int cmd_num;
844
845 if (dma->dma_msg_flag & I2C_M_RD) {
846 /*
847 * One RX cmd word can trigger DMA receive no more than 256 bytes.
848 * The number of RX cmd words should be calculated based on the data
849 * length.
850 */
851 cmd_num = DIV_ROUND_UP(dma->dma_len, CHUNK_DATA);
852 dma->tx_burst_num = lpi2c_imx_find_max_burst_num(lpi2c_imx->txfifosize,
853 cmd_num);
854 dma->rx_burst_num = lpi2c_imx_find_max_burst_num(lpi2c_imx->rxfifosize,
855 dma->dma_len);
856 } else {
857 dma->tx_burst_num = lpi2c_imx_find_max_burst_num(lpi2c_imx->txfifosize,
858 dma->dma_len);
859 }
860 }
861
lpi2c_dma_config(struct lpi2c_imx_struct * lpi2c_imx)862 static int lpi2c_dma_config(struct lpi2c_imx_struct *lpi2c_imx)
863 {
864 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
865 struct dma_slave_config rx = {}, tx = {};
866 int ret;
867
868 lpi2c_imx_dma_burst_num_calculate(lpi2c_imx);
869
870 if (dma->dma_msg_flag & I2C_M_RD) {
871 tx.dst_addr = dma->phy_addr + LPI2C_MTDR;
872 tx.dst_addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
873 tx.dst_maxburst = dma->tx_burst_num;
874 tx.direction = DMA_MEM_TO_DEV;
875 ret = dmaengine_slave_config(dma->chan_tx, &tx);
876 if (ret < 0)
877 return ret;
878
879 rx.src_addr = dma->phy_addr + LPI2C_MRDR;
880 rx.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
881 rx.src_maxburst = dma->rx_burst_num;
882 rx.direction = DMA_DEV_TO_MEM;
883 ret = dmaengine_slave_config(dma->chan_rx, &rx);
884 if (ret < 0)
885 return ret;
886 } else {
887 tx.dst_addr = dma->phy_addr + LPI2C_MTDR;
888 tx.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
889 tx.dst_maxburst = dma->tx_burst_num;
890 tx.direction = DMA_MEM_TO_DEV;
891 ret = dmaengine_slave_config(dma->chan_tx, &tx);
892 if (ret < 0)
893 return ret;
894 }
895
896 return 0;
897 }
898
lpi2c_dma_enable(struct lpi2c_imx_struct * lpi2c_imx)899 static void lpi2c_dma_enable(struct lpi2c_imx_struct *lpi2c_imx)
900 {
901 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
902 /*
903 * TX interrupt will be triggered when the number of words in
904 * the transmit FIFO is equal or less than TX watermark.
905 * RX interrupt will be triggered when the number of words in
906 * the receive FIFO is greater than RX watermark.
907 * In order to trigger the DMA interrupt, TX watermark should be
908 * set equal to the DMA TX burst number but RX watermark should
909 * be set less than the DMA RX burst number.
910 */
911 if (dma->dma_msg_flag & I2C_M_RD) {
912 /* Set I2C TX/RX watermark */
913 writel(dma->tx_burst_num | (dma->rx_burst_num - 1) << 16,
914 lpi2c_imx->base + LPI2C_MFCR);
915 /* Enable I2C DMA TX/RX function */
916 writel(MDER_TDDE | MDER_RDDE, lpi2c_imx->base + LPI2C_MDER);
917 } else {
918 /* Set I2C TX watermark */
919 writel(dma->tx_burst_num, lpi2c_imx->base + LPI2C_MFCR);
920 /* Enable I2C DMA TX function */
921 writel(MDER_TDDE, lpi2c_imx->base + LPI2C_MDER);
922 }
923
924 /* Enable NACK detected */
925 lpi2c_imx_intctrl(lpi2c_imx, MIER_NDIE);
926 };
927
928 /*
929 * When lpi2c is in TX DMA mode we can use one DMA TX channel to write
930 * data word into TXFIFO, but in RX DMA mode it is different.
931 *
932 * The LPI2C MTDR register is a command data and transmit data register.
933 * Bits 8-10 are the command data field and Bits 0-7 are the transmit
934 * data field. When the LPI2C master needs to read data, the number of
935 * bytes to read should be set in the command field and RECV_DATA should
936 * be set into the command data field to receive (DATA[7:0] + 1) bytes.
937 * The recv data command word is made of RECV_DATA in the command data
938 * field and the number of bytes to read in transmit data field. When the
939 * length of data to be read exceeds 256 bytes, recv data command word
940 * needs to be written to TXFIFO multiple times.
941 *
 * So when in RX DMA mode, the TX channel must also be configured to
 * send RX command words, and the RX command words must be set up in
 * advance before the transfer starts.
945 */
lpi2c_imx_dma_xfer(struct lpi2c_imx_struct * lpi2c_imx,struct i2c_msg * msg)946 static int lpi2c_imx_dma_xfer(struct lpi2c_imx_struct *lpi2c_imx,
947 struct i2c_msg *msg)
948 {
949 struct lpi2c_imx_dma *dma = lpi2c_imx->dma;
950 int ret;
951
952 /* When DMA mode fails before transferring, CPU mode can be used. */
953 dma->using_pio_mode = true;
954
955 dma->dma_len = msg->len;
956 dma->dma_msg_flag = msg->flags;
957 dma->dma_buf = i2c_get_dma_safe_msg_buf(msg, I2C_DMA_THRESHOLD);
958 if (!dma->dma_buf)
959 return -ENOMEM;
960
961 ret = lpi2c_dma_config(lpi2c_imx);
962 if (ret) {
963 dev_err(&lpi2c_imx->adapter.dev, "Failed to configure DMA (%d)\n", ret);
964 goto disable_dma;
965 }
966
967 lpi2c_dma_enable(lpi2c_imx);
968
969 ret = lpi2c_dma_submit(lpi2c_imx);
970 if (ret) {
971 dev_err(&lpi2c_imx->adapter.dev, "DMA submission failed (%d)\n", ret);
972 goto disable_dma;
973 }
974
975 if (dma->dma_msg_flag & I2C_M_RD) {
976 ret = lpi2c_imx_alloc_rx_cmd_buf(lpi2c_imx);
977 if (ret)
978 goto disable_cleanup_data_dma;
979
980 ret = lpi2c_dma_rx_cmd_submit(lpi2c_imx);
981 if (ret)
982 goto disable_cleanup_data_dma;
983 }
984
985 ret = lpi2c_imx_dma_msg_complete(lpi2c_imx);
986 if (ret)
987 goto disable_cleanup_all_dma;
988
989 /* When encountering NACK in transfer, clean up all DMA transfers */
990 if ((readl(lpi2c_imx->base + LPI2C_MSR) & MSR_NDF) && !ret) {
991 ret = -EIO;
992 goto disable_cleanup_all_dma;
993 }
994
995 if (dma->dma_msg_flag & I2C_M_RD)
996 dma_unmap_single(dma->chan_tx->device->dev, dma->dma_tx_addr,
997 dma->rx_cmd_buf_len, DMA_TO_DEVICE);
998 lpi2c_dma_unmap(dma);
999
1000 goto disable_dma;
1001
1002 disable_cleanup_all_dma:
1003 if (dma->dma_msg_flag & I2C_M_RD)
1004 lpi2c_cleanup_rx_cmd_dma(dma);
1005 disable_cleanup_data_dma:
1006 lpi2c_cleanup_dma(dma);
1007 disable_dma:
1008 /* Disable I2C DMA function */
1009 writel(0, lpi2c_imx->base + LPI2C_MDER);
1010
1011 if (dma->dma_msg_flag & I2C_M_RD)
1012 kfree(dma->rx_cmd_buf);
1013
1014 if (ret)
1015 i2c_put_dma_safe_msg_buf(dma->dma_buf, msg, false);
1016 else
1017 i2c_put_dma_safe_msg_buf(dma->dma_buf, msg, true);
1018
1019 return ret;
1020 }
1021
/*
 * Core transfer loop shared by the sleeping and atomic ->xfer entry
 * points. Returns the number of messages transferred on success, or a
 * negative errno.
 */
static int lpi2c_imx_xfer_common(struct i2c_adapter *adapter,
				 struct i2c_msg *msgs, int num, bool atomic)
{
	struct lpi2c_imx_struct *lpi2c_imx = i2c_get_adapdata(adapter);
	unsigned int temp;
	int i, result;

	result = lpi2c_imx_master_enable(lpi2c_imx);
	if (result)
		return result;

	for (i = 0; i < num; i++) {
		/* Each message begins with a (repeated) START + address */
		result = lpi2c_imx_start(lpi2c_imx, &msgs[i], atomic);
		if (result)
			goto disable;

		/* quick smbus */
		if (num == 1 && msgs[0].len == 0)
			goto stop;

		lpi2c_imx->rx_buf = NULL;
		lpi2c_imx->tx_buf = NULL;
		lpi2c_imx->delivered = 0;
		lpi2c_imx->msglen = msgs[i].len;

		if (atomic) {
			result = lpi2c_imx_pio_xfer_atomic(lpi2c_imx, &msgs[i]);
		} else {
			init_completion(&lpi2c_imx->complete);

			/* A DMA failure before data moved falls back to PIO */
			if (is_use_dma(lpi2c_imx, &msgs[i])) {
				result = lpi2c_imx_dma_xfer(lpi2c_imx, &msgs[i]);
				if (result && lpi2c_imx->dma->using_pio_mode)
					result = lpi2c_imx_pio_xfer(lpi2c_imx, &msgs[i]);
			} else {
				result = lpi2c_imx_pio_xfer(lpi2c_imx, &msgs[i]);
			}
		}

		if (result)
			goto stop;

		/* For writes, drain the FIFO before starting the next msg */
		if (!(msgs[i].flags & I2C_M_RD)) {
			result = lpi2c_imx_txfifo_empty(lpi2c_imx, atomic);
			if (result)
				goto stop;
		}
	}

stop:
	lpi2c_imx_stop(lpi2c_imx, atomic);

	/* A NACK anywhere during the transfer turns success into -EIO */
	temp = readl(lpi2c_imx->base + LPI2C_MSR);
	if ((temp & MSR_NDF) && !result)
		result = -EIO;

disable:
	lpi2c_imx_master_disable(lpi2c_imx);

	dev_dbg(&lpi2c_imx->adapter.dev, "<%s> exit with: %s: %d\n", __func__,
		(result < 0) ? "error" : "success msg",
		(result < 0) ? result : num);

	return (result < 0) ? result : num;
}
1087
/* Regular (sleeping) transfer entry point for the i2c core */
static int lpi2c_imx_xfer(struct i2c_adapter *adapter, struct i2c_msg *msgs, int num)
{
	return lpi2c_imx_xfer_common(adapter, msgs, num, false);
}
1092
/* Atomic (non-sleeping, polled) transfer entry point, e.g. for late shutdown */
static int lpi2c_imx_xfer_atomic(struct i2c_adapter *adapter, struct i2c_msg *msgs, int num)
{
	return lpi2c_imx_xfer_common(adapter, msgs, num, true);
}
1097
/*
 * Handle an interrupt while operating as an I2C target (slave).
 *
 * @ssr:         raw target status register value
 * @sier_filter: status bits that are both set and interrupt-enabled
 *
 * Forwards bus events (address match, read/write requests, STOP) to the
 * registered backend via i2c_slave_event() and clears the handled status
 * bits on exit.
 */
static irqreturn_t lpi2c_imx_target_isr(struct lpi2c_imx_struct *lpi2c_imx,
					u32 ssr, u32 sier_filter)
{
	u8 value;
	u32 sasr;

	/* Arbitration lost: mute all target interrupts and bail out */
	if (sier_filter & SSR_BEF) {
		writel(0, lpi2c_imx->base + LPI2C_SIER);
		return IRQ_HANDLED;
	}

	/* Address detected */
	if (sier_filter & SSR_AVF) {
		sasr = readl(lpi2c_imx->base + LPI2C_SASR);
		if (SASR_READ_REQ & sasr) {
			/*
			 * Read request: the backend fills 'value' with the
			 * first byte to send; skip the remaining event checks.
			 */
			i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_READ_REQUESTED, &value);
			writel(value, lpi2c_imx->base + LPI2C_STDR);
			goto ret;
		} else {
			/* Write request: fall through to check SDF/TDF/RDF below */
			i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_WRITE_REQUESTED, &value);
		}
	}

	if (sier_filter & SSR_SDF)
		/* STOP */
		i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_STOP, &value);

	if (sier_filter & SSR_TDF) {
		/* Target send data: backend provides the next byte to transmit */
		i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_READ_PROCESSED, &value);
		writel(value, lpi2c_imx->base + LPI2C_STDR);
	}

	if (sier_filter & SSR_RDF) {
		/* Target receive data: hand the received byte to the backend */
		value = readl(lpi2c_imx->base + LPI2C_SRDR);
		i2c_slave_event(lpi2c_imx->target, I2C_SLAVE_WRITE_RECEIVED, &value);
	}

ret:
	/* Clear SSR (write-1-to-clear bits only) */
	writel(ssr & SSR_CLEAR_BITS, lpi2c_imx->base + LPI2C_SSR);
	return IRQ_HANDLED;
}
1145
/*
 * Handle an interrupt while operating as an I2C master.
 *
 * Snapshots the enabled interrupt mask, disables further interrupts, and
 * services exactly one condition per invocation: NACK completes the
 * in-flight transfer; otherwise RX/TX FIFO service keeps the transfer
 * moving (the FIFO helpers re-enable the interrupts they still need).
 */
static irqreturn_t lpi2c_imx_master_isr(struct lpi2c_imx_struct *lpi2c_imx)
{
	unsigned int enabled;
	unsigned int temp;

	enabled = readl(lpi2c_imx->base + LPI2C_MIER);

	lpi2c_imx_intctrl(lpi2c_imx, 0);
	temp = readl(lpi2c_imx->base + LPI2C_MSR);
	/* Only react to status bits whose interrupt was actually enabled */
	temp &= enabled;

	if (temp & MSR_NDF)
		complete(&lpi2c_imx->complete);
	else if (temp & MSR_RDF)
		lpi2c_imx_read_rxfifo(lpi2c_imx, false);
	else if (temp & MSR_TDF)
		lpi2c_imx_write_txfifo(lpi2c_imx, false);

	return IRQ_HANDLED;
}
1166
lpi2c_imx_isr(int irq,void * dev_id)1167 static irqreturn_t lpi2c_imx_isr(int irq, void *dev_id)
1168 {
1169 struct lpi2c_imx_struct *lpi2c_imx = dev_id;
1170
1171 if (lpi2c_imx->target) {
1172 u32 scr = readl(lpi2c_imx->base + LPI2C_SCR);
1173 u32 ssr = readl(lpi2c_imx->base + LPI2C_SSR);
1174 u32 sier_filter = ssr & readl(lpi2c_imx->base + LPI2C_SIER);
1175
1176 /*
1177 * The target is enabled and an interrupt has been triggered.
1178 * Enter the target's irq handler.
1179 */
1180 if ((scr & SCR_SEN) && sier_filter)
1181 return lpi2c_imx_target_isr(lpi2c_imx, ssr, sier_filter);
1182 }
1183
1184 /*
1185 * Otherwise the interrupt has been triggered by the master.
1186 * Enter the master's irq handler.
1187 */
1188 return lpi2c_imx_master_isr(lpi2c_imx);
1189 }
1190
/*
 * Program the target (slave) half of the controller: reset it, set the
 * 7-bit address, configure clock stretching / glitch filters and enable
 * the module with its interrupts.
 */
static void lpi2c_imx_target_init(struct lpi2c_imx_struct *lpi2c_imx)
{
	u32 temp;

	/* reset target module (pulse SCR_RST) */
	writel(SCR_RST, lpi2c_imx->base + LPI2C_SCR);
	writel(0, lpi2c_imx->base + LPI2C_SCR);

	/* Set target address (SAMR expects the address left-shifted by one) */
	writel((lpi2c_imx->target->addr << 1), lpi2c_imx->base + LPI2C_SAMR);

	/* Stall the bus on RX-full and TX-needed so software can keep up */
	writel(SCFGR1_RXSTALL | SCFGR1_TXDSTALL, lpi2c_imx->base + LPI2C_SCFGR1);

	/*
	 * set SCFGR2: FILTSDA, FILTSCL and CLKHOLD
	 *
	 * FILTSCL/FILTSDA can eliminate signal skew. It should generally be
	 * set to the same value and should be set >= 50ns.
	 *
	 * CLKHOLD is only used when clock stretching is enabled, but it will
	 * extend the clock stretching to ensure there is an additional delay
	 * between the target driving SDA and the target releasing the SCL pin.
	 *
	 * CLKHOLD setting is crucial for lpi2c target. When master read data
	 * from target, if there is a delay caused by cpu idle, excessive load,
	 * or other delays between two bytes in one message transmission, it
	 * will cause a short interval time between the driving SDA signal and
	 * releasing SCL signal. The lpi2c master will mistakenly think it is a stop
	 * signal resulting in an arbitration failure. This issue can be avoided
	 * by setting CLKHOLD.
	 *
	 * In order to ensure lpi2c function normally when the lpi2c speed is as
	 * low as 100kHz, CLKHOLD should be set to 3 and it is also compatible with
	 * higher clock frequency like 400kHz and 1MHz.
	 */
	temp = SCFGR2_FILTSDA(2) | SCFGR2_FILTSCL(2) | SCFGR2_CLKHOLD(3);
	writel(temp, lpi2c_imx->base + LPI2C_SCFGR2);

	/*
	 * Enable module:
	 * SCR_FILTEN can enable digital filter and output delay counter for LPI2C
	 * target mode. So SCR_FILTEN need be asserted when enable SDA/SCL FILTER
	 * and CLKHOLD.
	 */
	writel(SCR_SEN | SCR_FILTEN, lpi2c_imx->base + LPI2C_SCR);

	/* Enable interrupt from i2c module */
	writel(SLAVE_INT_FLAG, lpi2c_imx->base + LPI2C_SIER);
}
1240
lpi2c_imx_register_target(struct i2c_client * client)1241 static int lpi2c_imx_register_target(struct i2c_client *client)
1242 {
1243 struct lpi2c_imx_struct *lpi2c_imx = i2c_get_adapdata(client->adapter);
1244 int ret;
1245
1246 if (lpi2c_imx->target)
1247 return -EBUSY;
1248
1249 lpi2c_imx->target = client;
1250
1251 ret = pm_runtime_resume_and_get(lpi2c_imx->adapter.dev.parent);
1252 if (ret < 0) {
1253 dev_err(&lpi2c_imx->adapter.dev, "failed to resume i2c controller");
1254 return ret;
1255 }
1256
1257 lpi2c_imx_target_init(lpi2c_imx);
1258
1259 return 0;
1260 }
1261
/*
 * Unregister the current I2C target (slave) backend.
 *
 * Clears the programmed address, resets the target block and drops the
 * runtime-PM reference taken at registration time so the controller may
 * autosuspend again.
 */
static int lpi2c_imx_unregister_target(struct i2c_client *client)
{
	struct lpi2c_imx_struct *lpi2c_imx = i2c_get_adapdata(client->adapter);
	int ret;

	if (!lpi2c_imx->target)
		return -EINVAL;

	/* Reset target address. */
	writel(0, lpi2c_imx->base + LPI2C_SAMR);

	/* Pulse the reset bit to return the target block to its default state */
	writel(SCR_RST, lpi2c_imx->base + LPI2C_SCR);
	writel(0, lpi2c_imx->base + LPI2C_SCR);

	lpi2c_imx->target = NULL;

	/* Balance pm_runtime_resume_and_get() from registration */
	ret = pm_runtime_put_sync(lpi2c_imx->adapter.dev.parent);
	if (ret < 0)
		dev_err(&lpi2c_imx->adapter.dev, "failed to suspend i2c controller");

	return ret;
}
1284
lpi2c_imx_init_recovery_info(struct lpi2c_imx_struct * lpi2c_imx,struct platform_device * pdev)1285 static int lpi2c_imx_init_recovery_info(struct lpi2c_imx_struct *lpi2c_imx,
1286 struct platform_device *pdev)
1287 {
1288 struct i2c_bus_recovery_info *bri = &lpi2c_imx->rinfo;
1289
1290 bri->pinctrl = devm_pinctrl_get(&pdev->dev);
1291 if (IS_ERR(bri->pinctrl))
1292 return PTR_ERR(bri->pinctrl);
1293
1294 lpi2c_imx->adapter.bus_recovery_info = bri;
1295
1296 return 0;
1297 }
1298
dma_exit(struct device * dev,struct lpi2c_imx_dma * dma)1299 static void dma_exit(struct device *dev, struct lpi2c_imx_dma *dma)
1300 {
1301 if (dma->chan_rx)
1302 dma_release_channel(dma->chan_rx);
1303
1304 if (dma->chan_tx)
1305 dma_release_channel(dma->chan_tx);
1306
1307 devm_kfree(dev, dma);
1308 }
1309
/*
 * Request the TX and RX DMA channels and mark the controller DMA-capable.
 *
 * @phy_addr: physical base of the controller registers, used later as the
 *            DMA slave address for the FIFO data registers.
 *
 * On -ENODEV (no channel described) the failure is silent and the driver
 * falls back to PIO; -EPROBE_DEFER is propagated so probe can retry.
 */
static int lpi2c_dma_init(struct device *dev, dma_addr_t phy_addr)
{
	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
	struct lpi2c_imx_dma *dma;
	int ret;

	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return -ENOMEM;

	dma->phy_addr = phy_addr;

	/* Prepare for TX DMA: */
	dma->chan_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		ret = PTR_ERR(dma->chan_tx);
		if (ret != -ENODEV && ret != -EPROBE_DEFER)
			dev_err(dev, "can't request DMA tx channel (%d)\n", ret);
		/* NULL the channel so dma_exit() won't release an error pointer */
		dma->chan_tx = NULL;
		goto dma_exit;
	}

	/* Prepare for RX DMA: */
	dma->chan_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		ret = PTR_ERR(dma->chan_rx);
		if (ret != -ENODEV && ret != -EPROBE_DEFER)
			dev_err(dev, "can't request DMA rx channel (%d)\n", ret);
		/* NULL the channel so dma_exit() won't release an error pointer */
		dma->chan_rx = NULL;
		goto dma_exit;
	}

	lpi2c_imx->can_use_dma = true;
	lpi2c_imx->dma = dma;
	return 0;

dma_exit:
	/* Label and function share a name; labels live in a separate namespace */
	dma_exit(dev, dma);
	return ret;
}
1350
/* Report supported transfer types: plain I2C plus emulated SMBus calls */
static u32 lpi2c_imx_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL |
		I2C_FUNC_SMBUS_READ_BLOCK_DATA;
}
1356
/* Adapter algorithm: master transfer paths plus target (slave) hooks */
static const struct i2c_algorithm lpi2c_imx_algo = {
	.xfer = lpi2c_imx_xfer,
	.xfer_atomic = lpi2c_imx_xfer_atomic,
	.functionality = lpi2c_imx_func,
	.reg_target = lpi2c_imx_register_target,
	.unreg_target = lpi2c_imx_unregister_target,
};
1364
/* Devicetree match table */
static const struct of_device_id lpi2c_imx_of_match[] = {
	{ .compatible = "fsl,imx7ulp-lpi2c" },
	{ }
};
MODULE_DEVICE_TABLE(of, lpi2c_imx_of_match);
1370
/*
 * Probe: map registers, acquire clocks/IRQ, optionally set up bus
 * recovery and DMA, enable runtime PM with autosuspend, and register the
 * I2C adapter.
 */
static int lpi2c_imx_probe(struct platform_device *pdev)
{
	struct lpi2c_imx_struct *lpi2c_imx;
	struct resource *res;
	dma_addr_t phy_addr;
	unsigned int temp;
	int irq, ret;

	lpi2c_imx = devm_kzalloc(&pdev->dev, sizeof(*lpi2c_imx), GFP_KERNEL);
	if (!lpi2c_imx)
		return -ENOMEM;

	lpi2c_imx->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(lpi2c_imx->base))
		return PTR_ERR(lpi2c_imx->base);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	lpi2c_imx->adapter.owner = THIS_MODULE;
	lpi2c_imx->adapter.algo = &lpi2c_imx_algo;
	lpi2c_imx->adapter.dev.parent = &pdev->dev;
	lpi2c_imx->adapter.dev.of_node = pdev->dev.of_node;
	strscpy(lpi2c_imx->adapter.name, pdev->name,
		sizeof(lpi2c_imx->adapter.name));
	/* Register block physical base, later used as the DMA slave address */
	phy_addr = (dma_addr_t)res->start;

	ret = devm_clk_bulk_get_all(&pdev->dev, &lpi2c_imx->clks);
	if (ret < 0)
		return dev_err_probe(&pdev->dev, ret, "can't get I2C peripheral clock\n");
	lpi2c_imx->num_clks = ret;

	/* Fall back to 100 kHz when "clock-frequency" is absent */
	ret = of_property_read_u32(pdev->dev.of_node,
				   "clock-frequency", &lpi2c_imx->bitrate);
	if (ret)
		lpi2c_imx->bitrate = I2C_MAX_STANDARD_MODE_FREQ;

	ret = devm_request_irq(&pdev->dev, irq, lpi2c_imx_isr, IRQF_NO_SUSPEND,
			       pdev->name, lpi2c_imx);
	if (ret)
		return dev_err_probe(&pdev->dev, ret, "can't claim irq %d\n", irq);

	i2c_set_adapdata(&lpi2c_imx->adapter, lpi2c_imx);
	platform_set_drvdata(pdev, lpi2c_imx);

	/*
	 * NOTE(review): error returns below do not unprepare these clocks
	 * (runtime suspend only disables them) — verify whether the prepare
	 * count is intentionally left elevated on probe failure.
	 */
	ret = clk_bulk_prepare_enable(lpi2c_imx->num_clks, lpi2c_imx->clks);
	if (ret)
		return ret;

	/*
	 * Lock the parent clock rate to avoid getting parent clock upon
	 * each transfer
	 */
	ret = devm_clk_rate_exclusive_get(&pdev->dev, lpi2c_imx->clks[0].clk);
	if (ret)
		return dev_err_probe(&pdev->dev, ret,
				     "can't lock I2C peripheral clock rate\n");

	lpi2c_imx->rate_per = clk_get_rate(lpi2c_imx->clks[0].clk);
	if (!lpi2c_imx->rate_per)
		return dev_err_probe(&pdev->dev, -EINVAL,
				     "can't get I2C peripheral clock rate\n");

	/* Runtime PM: start active (clocks already on), autosuspend after use */
	pm_runtime_set_autosuspend_delay(&pdev->dev, I2C_PM_TIMEOUT);
	pm_runtime_use_autosuspend(&pdev->dev);
	pm_runtime_get_noresume(&pdev->dev);
	pm_runtime_set_active(&pdev->dev);
	pm_runtime_enable(&pdev->dev);

	/* FIFO depths are encoded as log2 in PARAM[3:0] (TX) and PARAM[11:8] (RX) */
	temp = readl(lpi2c_imx->base + LPI2C_PARAM);
	lpi2c_imx->txfifosize = 1 << (temp & 0x0f);
	lpi2c_imx->rxfifosize = 1 << ((temp >> 8) & 0x0f);

	/* Init optional bus recovery function */
	ret = lpi2c_imx_init_recovery_info(lpi2c_imx, pdev);
	/* Give it another chance if pinctrl used is not ready yet */
	if (ret == -EPROBE_DEFER)
		goto rpm_disable;

	/* Init DMA; any failure other than probe deferral falls back to PIO */
	ret = lpi2c_dma_init(&pdev->dev, phy_addr);
	if (ret) {
		if (ret == -EPROBE_DEFER)
			goto rpm_disable;
		dev_info(&pdev->dev, "use pio mode\n");
	}

	ret = i2c_add_adapter(&lpi2c_imx->adapter);
	if (ret)
		goto rpm_disable;

	/* Drop the probe-time reference; the controller may now autosuspend */
	pm_runtime_put_autosuspend(&pdev->dev);

	dev_info(&lpi2c_imx->adapter.dev, "LPI2C adapter registered\n");

	return 0;

rpm_disable:
	pm_runtime_dont_use_autosuspend(&pdev->dev);
	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return ret;
}
1476
/* Remove: unregister the adapter, then tear down runtime PM */
static void lpi2c_imx_remove(struct platform_device *pdev)
{
	struct lpi2c_imx_struct *lpi2c_imx = platform_get_drvdata(pdev);

	i2c_del_adapter(&lpi2c_imx->adapter);

	pm_runtime_disable(&pdev->dev);
	pm_runtime_dont_use_autosuspend(&pdev->dev);
}
1486
/* Runtime suspend: gate the clocks, then park the pins in sleep state */
static int __maybe_unused lpi2c_runtime_suspend(struct device *dev)
{
	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);

	/* Disable only (clocks stay prepared); re-enabled in runtime resume */
	clk_bulk_disable(lpi2c_imx->num_clks, lpi2c_imx->clks);
	pinctrl_pm_select_sleep_state(dev);

	return 0;
}
1496
/* Runtime resume: restore the default pin state, then ungate the clocks */
static int __maybe_unused lpi2c_runtime_resume(struct device *dev)
{
	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
	int ret;

	pinctrl_pm_select_default_state(dev);
	ret = clk_bulk_enable(lpi2c_imx->num_clks, lpi2c_imx->clks);
	if (ret) {
		dev_err(dev, "failed to enable I2C clock, ret=%d\n", ret);
		return ret;
	}

	return 0;
}
1511
/* Late system suspend: force the runtime-suspend path (see lpi2c_suspend()) */
static int __maybe_unused lpi2c_suspend_noirq(struct device *dev)
{
	return pm_runtime_force_suspend(dev);
}
1516
/* Early system resume: power the controller back up and restore target mode */
static int __maybe_unused lpi2c_resume_noirq(struct device *dev)
{
	struct lpi2c_imx_struct *lpi2c_imx = dev_get_drvdata(dev);
	int ret;

	ret = pm_runtime_force_resume(dev);
	if (ret)
		return ret;

	/*
	 * If the I2C module powers down during system suspend,
	 * the register values will be lost. Therefore, reinitialize
	 * the target when the system resumes.
	 */
	if (lpi2c_imx->target)
		lpi2c_imx_target_init(lpi2c_imx);

	return 0;
}
1536
static int lpi2c_suspend(struct device *dev)
{
	/*
	 * Some I2C devices may need the I2C controller to remain active
	 * during resume_noirq() or suspend_noirq(). If the controller is
	 * autosuspended, there is no way to wake it up once runtime PM is
	 * disabled (in suspend_late()).
	 *
	 * During system resume, the I2C controller will be available only
	 * after runtime PM is re-enabled (in resume_early()). However, this
	 * may be too late for some devices.
	 *
	 * Wake up the controller in the suspend() callback while runtime PM
	 * is still enabled. The I2C controller will remain available until
	 * the suspend_noirq() callback (pm_runtime_force_suspend()) is
	 * called. During resume, the I2C controller can be restored by the
	 * resume_noirq() callback (pm_runtime_force_resume()).
	 *
	 * Finally, the resume() callback re-enables autosuspend, so the I2C
	 * controller stays available for the whole window between
	 * resume_noirq() and the next suspend_noirq().
	 */
	return pm_runtime_resume_and_get(dev);
}
1561
/* System resume: drop the reference taken in lpi2c_suspend(), re-arm autosuspend */
static int lpi2c_resume(struct device *dev)
{
	pm_runtime_put_autosuspend(dev);

	return 0;
}
1568
/* PM callbacks: system sleep (with noirq phase) plus runtime PM */
static const struct dev_pm_ops lpi2c_pm_ops = {
	SET_NOIRQ_SYSTEM_SLEEP_PM_OPS(lpi2c_suspend_noirq,
				      lpi2c_resume_noirq)
	SYSTEM_SLEEP_PM_OPS(lpi2c_suspend, lpi2c_resume)
	SET_RUNTIME_PM_OPS(lpi2c_runtime_suspend,
			   lpi2c_runtime_resume, NULL)
};
1576
/* Platform driver glue */
static struct platform_driver lpi2c_imx_driver = {
	.probe = lpi2c_imx_probe,
	.remove = lpi2c_imx_remove,
	.driver = {
		.name = DRIVER_NAME,
		.of_match_table = lpi2c_imx_of_match,
		.pm = &lpi2c_pm_ops,
	},
};
1586
/* Standard module registration and metadata */
module_platform_driver(lpi2c_imx_driver);

MODULE_AUTHOR("Gao Pan <pandy.gao@nxp.com>");
MODULE_DESCRIPTION("I2C adapter driver for LPI2C bus");
MODULE_LICENSE("GPL");
1592