1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * SuperH Mobile I2C Controller
4 *
5 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
6 * Copyright (C) 2008 Magnus Damm
7 *
8 * Portions of the code based on out-of-tree driver i2c-sh7343.c
9 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
10 */
11
12 #include <linux/clk.h>
13 #include <linux/delay.h>
14 #include <linux/dmaengine.h>
15 #include <linux/dma-mapping.h>
16 #include <linux/err.h>
17 #include <linux/i2c.h>
18 #include <linux/init.h>
19 #include <linux/interrupt.h>
20 #include <linux/io.h>
21 #include <linux/kernel.h>
22 #include <linux/module.h>
23 #include <linux/of.h>
24 #include <linux/platform_device.h>
25 #include <linux/pm_runtime.h>
26 #include <linux/slab.h>
27 #include <linux/string_choices.h>
28
29 /* Transmit operation: */
30 /* */
31 /* 0 byte transmit */
32 /* BUS: S A8 ACK P(*) */
33 /* IRQ: DTE WAIT */
34 /* ICIC: */
35 /* ICCR: 0x94 0x90 */
36 /* ICDR: A8 */
37 /* */
38 /* 1 byte transmit */
39 /* BUS: S A8 ACK D8(1) ACK P(*) */
40 /* IRQ: DTE WAIT WAIT */
41 /* ICIC: -DTE */
42 /* ICCR: 0x94 0x90 */
43 /* ICDR: A8 D8(1) */
44 /* */
45 /* 2 byte transmit */
46 /* BUS: S A8 ACK D8(1) ACK D8(2) ACK P(*) */
47 /* IRQ: DTE WAIT WAIT WAIT */
48 /* ICIC: -DTE */
49 /* ICCR: 0x94 0x90 */
50 /* ICDR: A8 D8(1) D8(2) */
51 /* */
52 /* 3 bytes or more, +---------+ gets repeated */
53 /* */
54 /* */
55 /* Receive operation: */
56 /* */
57 /* 0 byte receive - not supported since slave may hold SDA low */
58 /* */
59 /* 1 byte receive [TX] | [RX] */
60 /* BUS: S A8 ACK | D8(1) ACK P(*) */
61 /* IRQ: DTE WAIT | WAIT DTE */
62 /* ICIC: -DTE | +DTE */
63 /* ICCR: 0x94 0x81 | 0xc0 */
64 /* ICDR: A8 | D8(1) */
65 /* */
66 /* 2 byte receive [TX]| [RX] */
67 /* BUS: S A8 ACK | D8(1) ACK D8(2) ACK P(*) */
68 /* IRQ: DTE WAIT | WAIT WAIT DTE */
69 /* ICIC: -DTE | +DTE */
70 /* ICCR: 0x94 0x81 | 0xc0 */
71 /* ICDR: A8 | D8(1) D8(2) */
72 /* */
73 /* 3 byte receive [TX] | [RX] (*) */
74 /* BUS: S A8 ACK | D8(1) ACK D8(2) ACK D8(3) ACK P */
75 /* IRQ: DTE WAIT | WAIT WAIT WAIT DTE */
76 /* ICIC: -DTE | +DTE */
77 /* ICCR: 0x94 0x81 | 0xc0 */
78 /* ICDR: A8 | D8(1) D8(2) D8(3) */
79 /* */
80 /* 4 bytes or more, this part is repeated +---------+ */
81 /* */
82 /* */
83 /* Interrupt order and BUSY flag */
84 /* ___ _ */
85 /* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/ */
86 /* SCL \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/ */
87 /* */
88 /* S D7 D6 D5 D4 D3 D2 D1 D0 P(*) */
89 /* ___ */
90 /* WAIT IRQ ________________________________/ \___________ */
91 /* TACK IRQ ____________________________________/ \_______ */
92 /* DTE IRQ __________________________________________/ \_ */
93 /* AL IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX */
94 /* _______________________________________________ */
95 /* BUSY __/ \_ */
96 /* */
97 /* (*) The STOP condition is only sent by the master at the end of the last */
98 /* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is */
99 /* only cleared after the STOP condition, so, between messages we have to */
100 /* poll for the DTE bit. */
101 /* */
102
/* Low-level protocol steps performed by i2c_op() */
enum sh_mobile_i2c_op {
	OP_START = 0,		/* send START condition, triggers DTE irq */
	OP_TX_FIRST,		/* mask DTE irq, write slave address to ICDR */
	OP_TX,			/* write one data byte */
	OP_TX_STOP,		/* end of TX: issue STOP (or repeated START) */
	OP_TX_TO_RX,		/* switch controller from TX to RX mode */
	OP_RX,			/* read one data byte */
	OP_RX_STOP,		/* enable DTE irq, issue STOP */
	OP_RX_STOP_DATA,	/* enable DTE irq, read last byte, issue STOP */
};
113
/* Per-adapter driver state */
struct sh_mobile_i2c_data {
	struct device *dev;		/* underlying platform device */
	void __iomem *reg;		/* mapped register base */
	struct i2c_adapter adap;
	unsigned long bus_speed;	/* requested bus speed [Hz] */
	unsigned int clks_per_count;	/* clock divider of this IP variant */
	struct clk *clk;		/* peripheral clock */
	u_int8_t icic;			/* extra bits OR'ed into every ICIC write */
	u_int8_t flags;			/* IIC_FLAG_* */
	u_int16_t iccl;			/* SCL low period (bit 8 lives in ICIC) */
	u_int16_t icch;			/* SCL high period (bit 8 lives in ICIC) */

	spinlock_t lock;		/* serializes register access in i2c_op() */
	wait_queue_head_t wait;		/* woken from the isr on completion/error */
	struct i2c_msg *msg;		/* message currently being transferred */
	int pos;			/* byte position; -1 = address not yet sent */
	int sr;				/* accumulated ICSR bits plus SW_DONE */
	bool send_stop;			/* issue STOP after the current message */
	bool stop_after_dma;		/* DMA done, isr simulates PIO end condition */
	bool atomic_xfer;		/* polled mode: no irq wakeups, no DMA */

	struct resource *res;		/* MMIO resource, used for DMA slave address */
	struct dma_chan *dma_tx;	/* ERR_PTR(-EPROBE_DEFER) until first use */
	struct dma_chan *dma_rx;
	struct scatterlist sg;		/* single-entry sg list for DMA */
	enum dma_data_direction dma_direction;	/* DMA_NONE when DMA is inactive */
	u8 *dma_buf;			/* DMA-safe bounce buffer for pd->msg */
};
142
/* Per-compatible configuration selected via the OF match table */
struct sh_mobile_dt_config {
	int clks_per_count;				/* clock divider of this variant */
	int (*setup)(struct sh_mobile_i2c_data *pd);	/* timing setup / errata hook */
};
147
#define IIC_FLAG_HAS_ICIC67	(1 << 0)	/* ICIC has ICCLB8/ICCHB8 bits */

/* Register offsets */
#define ICDR			0x00	/* data */
#define ICCR			0x04	/* control */
#define ICSR			0x08	/* status */
#define ICIC			0x0c	/* interrupt control */
#define ICCL			0x10	/* SCL low period */
#define ICCH			0x14	/* SCL high period */
#define ICSTART			0x70	/* only used by the r8a7740 pad workaround */

/* Register bits */
#define ICCR_ICE		0x80	/* interface enable */
#define ICCR_RACK		0x40
#define ICCR_TRS		0x10	/* transmit/receive select */
#define ICCR_BBSY		0x04
#define ICCR_SCP		0x01

#define ICSR_SCLM		0x80
#define ICSR_SDAM		0x40
#define SW_DONE			0x20	/* not a HW bit: driver-private "done" flag in pd->sr */
#define ICSR_BUSY		0x10
#define ICSR_AL			0x08	/* arbitration lost */
#define ICSR_TACK		0x04	/* no acknowledge */
#define ICSR_WAIT		0x02
#define ICSR_DTE		0x01	/* data transfer enable */

#define ICIC_ICCLB8		0x80	/* bit 8 of ICCL */
#define ICIC_ICCHB8		0x40	/* bit 8 of ICCH */
#define ICIC_TDMAE		0x20	/* TX DMA enable */
#define ICIC_RDMAE		0x10	/* RX DMA enable */
#define ICIC_ALE		0x08
#define ICIC_TACKE		0x04
#define ICIC_WAITE		0x02
#define ICIC_DTEE		0x01

#define ICSTART_ICSTART		0x10
185
iic_wr(struct sh_mobile_i2c_data * pd,int offs,unsigned char data)186 static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
187 {
188 if (offs == ICIC)
189 data |= pd->icic;
190
191 iowrite8(data, pd->reg + offs);
192 }
193
/* Read one register. */
static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
	return ioread8(pd->reg + offs);
}
198
/* Read-modify-write helper: set 'set' bits and clear 'clr' bits of a register. */
static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	unsigned char val = iic_rd(pd, offs);

	val |= set;
	val &= ~clr;
	iic_wr(pd, offs, val);
}
204
/*
 * Compute ICCL from:  ICCL >= COUNT_CLK * (tLOW + tf)
 *
 * SH-Mobile IIC hardware starts counting the LOW period of the SCL
 * signal (tLOW) as soon as it pulls the SCL line, so the fall time of
 * SCL (tf, 0.3 us by default for safety) must be added to meet the
 * tLOW timing spec.
 *
 * Times are passed in units of 0.1 us, hence the rounded /10000.
 */
static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
	return DIV_ROUND_CLOSEST(count_khz * (tLOW + tf), 10000);
}
219
/*
 * Compute ICCH from:  ICCH >= COUNT_CLK * (tHIGH + tf)
 *
 * SH-Mobile IIC hardware is aware of the SCL transition period 'tr'
 * and can ignore it; it starts counting the HIGH period of SCL (tHIGH)
 * once the SCL input voltage passes VIH.
 *
 * Calculating ICCH from tHIGH alone turned out to violate the tHD;STA
 * timing spec, so the fall time of SDA at the START condition (tf) is
 * included to meet both tHIGH and tHD;STA.
 *
 * Times are passed in units of 0.1 us, hence the rounded /10000.
 */
static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
	return DIV_ROUND_CLOSEST(count_khz * (tHIGH + tf), 10000);
}
238
sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data * pd)239 static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
240 {
241 u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;
242
243 if (pd->iccl > max_val || pd->icch > max_val) {
244 dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
245 pd->iccl, pd->icch);
246 return -EINVAL;
247 }
248
249 /* one more bit of ICCL in ICIC */
250 if (pd->iccl & 0x100)
251 pd->icic |= ICIC_ICCLB8;
252 else
253 pd->icic &= ~ICIC_ICCLB8;
254
255 /* one more bit of ICCH in ICIC */
256 if (pd->icch & 0x100)
257 pd->icic |= ICIC_ICCHB8;
258 else
259 pd->icic &= ~ICIC_ICCHB8;
260
261 dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
262 return 0;
263 }
264
sh_mobile_i2c_init(struct sh_mobile_i2c_data * pd)265 static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
266 {
267 unsigned long i2c_clk_khz;
268 u32 tHIGH, tLOW, tf;
269
270 i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;
271
272 if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
273 tLOW = 47; /* tLOW = 4.7 us */
274 tHIGH = 40; /* tHD;STA = tHIGH = 4.0 us */
275 tf = 3; /* tf = 0.3 us */
276 } else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
277 tLOW = 13; /* tLOW = 1.3 us */
278 tHIGH = 6; /* tHD;STA = tHIGH = 0.6 us */
279 tf = 3; /* tf = 0.3 us */
280 } else {
281 dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
282 pd->bus_speed);
283 return -EINVAL;
284 }
285
286 pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
287 pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);
288
289 return sh_mobile_i2c_check_timing(pd);
290 }
291
sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data * pd)292 static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
293 {
294 unsigned long clks_per_cycle;
295
296 /* L = 5, H = 4, L + H = 9 */
297 clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
298 pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
299 pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);
300
301 return sh_mobile_i2c_check_timing(pd);
302 }
303
i2c_op(struct sh_mobile_i2c_data * pd,enum sh_mobile_i2c_op op)304 static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
305 {
306 unsigned char ret = 0;
307 unsigned long flags;
308
309 dev_dbg(pd->dev, "op %d\n", op);
310
311 spin_lock_irqsave(&pd->lock, flags);
312
313 switch (op) {
314 case OP_START: /* issue start and trigger DTE interrupt */
315 iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
316 break;
317 case OP_TX_FIRST: /* disable DTE interrupt and write client address */
318 iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
319 iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
320 break;
321 case OP_TX: /* write data */
322 iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
323 break;
324 case OP_TX_STOP: /* issue a stop (or rep_start) */
325 iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
326 : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
327 break;
328 case OP_TX_TO_RX: /* select read mode */
329 iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
330 break;
331 case OP_RX: /* just read data */
332 ret = iic_rd(pd, ICDR);
333 break;
334 case OP_RX_STOP: /* enable DTE interrupt, issue stop */
335 if (!pd->atomic_xfer)
336 iic_wr(pd, ICIC,
337 ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
338 iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
339 break;
340 case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
341 if (!pd->atomic_xfer)
342 iic_wr(pd, ICIC,
343 ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
344 ret = iic_rd(pd, ICDR);
345 iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
346 break;
347 }
348
349 spin_unlock_irqrestore(&pd->lock, flags);
350
351 dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);
352 return ret;
353 }
354
sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data * pd)355 static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
356 {
357 if (pd->pos == pd->msg->len) {
358 i2c_op(pd, OP_TX_STOP);
359 return 1;
360 }
361
362 if (pd->pos == -1)
363 i2c_op(pd, OP_TX_FIRST);
364 else
365 i2c_op(pd, OP_TX);
366
367 pd->pos++;
368 return 0;
369 }
370
sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data * pd)371 static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
372 {
373 int real_pos;
374
375 /* switch from TX (address) to RX (data) adds two interrupts */
376 real_pos = pd->pos - 2;
377
378 if (pd->pos == -1) {
379 i2c_op(pd, OP_TX_FIRST);
380 } else if (pd->pos == 0) {
381 i2c_op(pd, OP_TX_TO_RX);
382 } else if (pd->pos == pd->msg->len) {
383 if (pd->stop_after_dma) {
384 /* Simulate PIO end condition after DMA transfer */
385 i2c_op(pd, OP_RX_STOP);
386 pd->pos++;
387 goto done;
388 }
389
390 if (real_pos < 0)
391 i2c_op(pd, OP_RX_STOP);
392 else
393 pd->msg->buf[real_pos] = i2c_op(pd, OP_RX_STOP_DATA);
394 } else if (real_pos >= 0) {
395 pd->msg->buf[real_pos] = i2c_op(pd, OP_RX);
396 }
397
398 done:
399 pd->pos++;
400 return pd->pos == (pd->msg->len + 2);
401 }
402
sh_mobile_i2c_isr(int irq,void * dev_id)403 static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
404 {
405 struct sh_mobile_i2c_data *pd = dev_id;
406 unsigned char sr;
407 int wakeup = 0;
408
409 sr = iic_rd(pd, ICSR);
410 pd->sr |= sr; /* remember state */
411
412 dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
413 str_read_write(pd->msg->flags & I2C_M_RD),
414 pd->pos, pd->msg->len);
415
416 /* Kick off TxDMA after preface was done */
417 if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
418 iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
419 else if (sr & (ICSR_AL | ICSR_TACK))
420 /* don't interrupt transaction - continue to issue stop */
421 iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
422 else if (pd->msg->flags & I2C_M_RD)
423 wakeup = sh_mobile_i2c_isr_rx(pd);
424 else
425 wakeup = sh_mobile_i2c_isr_tx(pd);
426
427 /* Kick off RxDMA after preface was done */
428 if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
429 iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);
430
431 if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
432 iic_wr(pd, ICSR, sr & ~ICSR_WAIT);
433
434 if (wakeup) {
435 pd->sr |= SW_DONE;
436 if (!pd->atomic_xfer)
437 wake_up(&pd->wait);
438 }
439
440 /* defeat write posting to avoid spurious WAIT interrupts */
441 iic_rd(pd, ICSR);
442
443 return IRQ_HANDLED;
444 }
445
/* Tear down the current DMA mapping; optionally abort the transfer first. */
static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd, bool terminate)
{
	struct dma_chan *chan;

	chan = (pd->dma_direction == DMA_FROM_DEVICE) ? pd->dma_rx : pd->dma_tx;

	/* only allowed from thread context! */
	if (terminate)
		dmaengine_terminate_sync(chan);

	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
			 pd->msg->len, pd->dma_direction);

	pd->dma_direction = DMA_NONE;
}
460
/*
 * DMA completion callback: unmap the buffer and hand the end of the
 * transfer back to the PIO state machine (see 'stop_after_dma').
 */
static void sh_mobile_i2c_dma_callback(void *data)
{
	struct sh_mobile_i2c_data *pd = data;

	sh_mobile_i2c_cleanup_dma(pd, false);
	pd->pos = pd->msg->len;		/* the next irq simulates the PIO end */
	pd->stop_after_dma = true;

	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}
471
/*
 * Request and configure one slave DMA channel ("tx" or "rx") with the
 * ICDR register as the single-byte port address.
 */
static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
		enum dma_transfer_direction dir, dma_addr_t port_addr)
{
	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
	struct dma_slave_config cfg = { .direction = dir };
	struct dma_chan *chan;
	int ret;

	chan = dma_request_chan(dev, chan_name);
	if (IS_ERR(chan)) {
		dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
			PTR_ERR(chan));
		return chan;
	}

	if (dir == DMA_MEM_TO_DEV) {
		cfg.dst_addr = port_addr;
		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else {
		cfg.src_addr = port_addr;
		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	}

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret) {
		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
	return chan;
}
507
/*
 * Try to set up DMA for the current message.  Every failure falls back
 * silently to PIO (debug message only) — PIO always works.
 */
static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
	bool read = pd->msg->flags & I2C_M_RD;
	enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;
	dma_cookie_t cookie;

	/* channels are requested lazily on first use */
	if (PTR_ERR(chan) == -EPROBE_DEFER) {
		if (read)
			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
									   pd->res->start + ICDR);
		else
			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
									   pd->res->start + ICDR);
	}

	if (IS_ERR(chan))
		return;

	dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
	if (dma_mapping_error(chan->device->dev, dma_addr)) {
		dev_dbg(pd->dev, "dma map failed, using PIO\n");
		return;
	}

	sg_dma_len(&pd->sg) = pd->msg->len;
	sg_dma_address(&pd->sg) = dma_addr;

	/* from here on cleanup_dma() knows which channel/mapping to undo */
	pd->dma_direction = dir;

	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
					 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	txdesc->callback = sh_mobile_i2c_dma_callback;
	txdesc->callback_param = pd;

	cookie = dmaengine_submit(txdesc);
	if (dma_submit_error(cookie)) {
		dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	dma_async_issue_pending(chan);
}
561
/*
 * Prepare the controller and driver state for one message.  'do_init'
 * re-initializes the channel (first message, or after a STOP).
 */
static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
		     bool do_init)
{
	if (do_init) {
		/* Initialize channel registers */
		iic_wr(pd, ICCR, ICCR_SCP);

		/* Enable channel and configure rx ack */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

		/* Set the clock */
		iic_wr(pd, ICCL, pd->iccl & 0xff);
		iic_wr(pd, ICCH, pd->icch & 0xff);
	}

	pd->msg = usr_msg;
	pd->pos = -1;	/* -1: slave address not sent yet */
	pd->sr = 0;

	/* atomic transfers use neither DMA nor interrupts */
	if (pd->atomic_xfer)
		return;

	pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
	if (pd->dma_buf)
		sh_mobile_i2c_xfer_dma(pd);

	/* Enable all interrupts to begin with */
	iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}
591
poll_dte(struct sh_mobile_i2c_data * pd)592 static int poll_dte(struct sh_mobile_i2c_data *pd)
593 {
594 int i;
595
596 for (i = 1000; i; i--) {
597 u_int8_t val = iic_rd(pd, ICSR);
598
599 if (val & ICSR_DTE)
600 break;
601
602 if (val & ICSR_TACK)
603 return -ENXIO;
604
605 udelay(10);
606 }
607
608 return i ? 0 : -ETIMEDOUT;
609 }
610
poll_busy(struct sh_mobile_i2c_data * pd)611 static int poll_busy(struct sh_mobile_i2c_data *pd)
612 {
613 int i;
614
615 for (i = 1000; i; i--) {
616 u_int8_t val = iic_rd(pd, ICSR);
617
618 dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);
619
620 /* the interrupt handler may wake us up before the
621 * transfer is finished, so poll the hardware
622 * until we're done.
623 */
624 if (!(val & ICSR_BUSY)) {
625 /* handle missing acknowledge and arbitration lost */
626 val |= pd->sr;
627 if (val & ICSR_TACK)
628 return -ENXIO;
629 if (val & ICSR_AL)
630 return -EAGAIN;
631 break;
632 }
633
634 udelay(10);
635 }
636
637 return i ? 0 : -ETIMEDOUT;
638 }
639
sh_mobile_xfer(struct sh_mobile_i2c_data * pd,struct i2c_msg * msgs,int num)640 static int sh_mobile_xfer(struct sh_mobile_i2c_data *pd,
641 struct i2c_msg *msgs, int num)
642 {
643 struct i2c_msg *msg;
644 int err = 0;
645 int i;
646 long time_left;
647
648 /* Wake up device and enable clock */
649 pm_runtime_get_sync(pd->dev);
650
651 /* Process all messages */
652 for (i = 0; i < num; i++) {
653 bool do_start = pd->send_stop || !i;
654 msg = &msgs[i];
655 pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
656 pd->stop_after_dma = false;
657
658 start_ch(pd, msg, do_start);
659
660 if (do_start)
661 i2c_op(pd, OP_START);
662
663 if (pd->atomic_xfer) {
664 unsigned long j = jiffies + pd->adap.timeout;
665
666 time_left = time_before_eq(jiffies, j);
667 while (time_left &&
668 !(pd->sr & (ICSR_TACK | SW_DONE))) {
669 unsigned char sr = iic_rd(pd, ICSR);
670
671 if (sr & (ICSR_AL | ICSR_TACK |
672 ICSR_WAIT | ICSR_DTE)) {
673 sh_mobile_i2c_isr(0, pd);
674 udelay(150);
675 } else {
676 cpu_relax();
677 }
678 time_left = time_before_eq(jiffies, j);
679 }
680 } else {
681 /* The interrupt handler takes care of the rest... */
682 time_left = wait_event_timeout(pd->wait,
683 pd->sr & (ICSR_TACK | SW_DONE),
684 pd->adap.timeout);
685
686 /* 'stop_after_dma' tells if DMA xfer was complete */
687 i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg,
688 pd->stop_after_dma);
689 }
690
691 if (!time_left) {
692 if (pd->dma_direction != DMA_NONE)
693 sh_mobile_i2c_cleanup_dma(pd, true);
694
695 err = -ETIMEDOUT;
696 break;
697 }
698
699 if (pd->send_stop)
700 err = poll_busy(pd);
701 else
702 err = poll_dte(pd);
703 if (err < 0)
704 break;
705 }
706
707 /* Disable channel */
708 iic_wr(pd, ICCR, ICCR_SCP);
709
710 /* Disable clock and mark device as idle */
711 pm_runtime_put_sync(pd->dev);
712
713 return err ?: num;
714 }
715
/* .xfer callback: normal, interrupt-driven transfer */
static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
			      struct i2c_msg *msgs,
			      int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = false;
	return sh_mobile_xfer(pd, msgs, num);
}
725
/* .xfer_atomic callback: polled transfer, no sleeping / interrupts */
static int sh_mobile_i2c_xfer_atomic(struct i2c_adapter *adapter,
				     struct i2c_msg *msgs,
				     int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = true;
	return sh_mobile_xfer(pd, msgs, num);
}
735
/* Advertised adapter capabilities */
static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}
740
/* both transfer paths share sh_mobile_xfer() */
static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
	.functionality = sh_mobile_i2c_func,
	.xfer = sh_mobile_i2c_xfer,
	.xfer_atomic = sh_mobile_i2c_xfer_atomic,
};

/* 0-byte reads are not supported since the slave may hold SDA low */
static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
	.flags = I2C_AQ_NO_ZERO_LEN_READ,
};
750
/*
 * r8a7740 has an errata regarding I2C I/O pad reset needing this workaround.
 * The exact register/delay sequence below implements that reset; it must
 * not be reordered.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
	iic_set_clr(pd, ICCR, ICCR_ICE, 0);
	iic_rd(pd, ICCR); /* dummy read */

	iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
	iic_rd(pd, ICSTART); /* dummy read */

	udelay(10);

	iic_wr(pd, ICCR, ICCR_SCP);
	iic_wr(pd, ICSTART, 0);

	udelay(10);

	/* toggle transmit mode to reset the pads */
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);
	iic_wr(pd, ICCR, 0);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);

	return sh_mobile_i2c_v2_init(pd);
}
778
/* classic timing calculation from the I2C spec constants */
static const struct sh_mobile_dt_config default_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_init,
};

/* "v2" variants: halved count clock, 5:4 L/H split */
static const struct sh_mobile_dt_config fast_clock_dt_config = {
	.clks_per_count = 2,
	.setup = sh_mobile_i2c_v2_init,
};

/* r8a7740 needs the I/O pad reset errata workaround at setup time */
static const struct sh_mobile_dt_config r8a7740_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_r8a7740_workaround,
};

static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
	{ .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
	{ .compatible = "renesas,iic-r8a774c0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7790", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7791", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7792", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7793", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7794", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7795", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a77990", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen2-iic", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen3-iic", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
	{},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);
812
sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data * pd)813 static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
814 {
815 if (!IS_ERR(pd->dma_tx)) {
816 dma_release_channel(pd->dma_tx);
817 pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
818 }
819
820 if (!IS_ERR(pd->dma_rx)) {
821 dma_release_channel(pd->dma_rx);
822 pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
823 }
824 }
825
/*
 * Request all interrupts of the device.  With DT they are enumerated
 * via platform_get_irq_optional(); legacy board files instead pass IRQ
 * ranges as resources.  Returns 0 if at least one IRQ was hooked up,
 * -ENOENT if none were found.
 */
static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
	struct device_node *np = dev_of_node(&dev->dev);
	int k = 0, ret;

	if (np) {
		int irq;

		/* -ENXIO marks the end of the IRQ list */
		while ((irq = platform_get_irq_optional(dev, k)) != -ENXIO) {
			if (irq < 0)
				return irq;
			ret = devm_request_irq(&dev->dev, irq, sh_mobile_i2c_isr,
					       0, dev_name(&dev->dev), pd);
			if (ret) {
				dev_err(&dev->dev, "cannot request IRQ %d\n", irq);
				return ret;
			}
			k++;
		}
	} else {
		struct resource *res;
		resource_size_t n;

		while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
			/* each resource may describe a whole range of IRQs */
			for (n = res->start; n <= res->end; n++) {
				ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
						       0, dev_name(&dev->dev), pd);
				if (ret) {
					dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
					return ret;
				}
			}
			k++;
		}
	}

	return k > 0 ? 0 : -ENOENT;
}
864
sh_mobile_i2c_probe(struct platform_device * dev)865 static int sh_mobile_i2c_probe(struct platform_device *dev)
866 {
867 struct sh_mobile_i2c_data *pd;
868 struct i2c_adapter *adap;
869 const struct sh_mobile_dt_config *config;
870 int ret;
871 u32 bus_speed;
872
873 pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
874 if (!pd)
875 return -ENOMEM;
876
877 pd->clk = devm_clk_get(&dev->dev, NULL);
878 if (IS_ERR(pd->clk)) {
879 dev_err(&dev->dev, "cannot get clock\n");
880 return PTR_ERR(pd->clk);
881 }
882
883 ret = sh_mobile_i2c_hook_irqs(dev, pd);
884 if (ret)
885 return ret;
886
887 pd->dev = &dev->dev;
888 platform_set_drvdata(dev, pd);
889
890 pd->reg = devm_platform_get_and_ioremap_resource(dev, 0, &pd->res);
891 if (IS_ERR(pd->reg))
892 return PTR_ERR(pd->reg);
893
894 ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
895 pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
896 pd->clks_per_count = 1;
897
898 /* Newer variants come with two new bits in ICIC */
899 if (resource_size(pd->res) > 0x17)
900 pd->flags |= IIC_FLAG_HAS_ICIC67;
901
902 pm_runtime_enable(&dev->dev);
903 pm_runtime_get_sync(&dev->dev);
904
905 config = of_device_get_match_data(&dev->dev);
906 if (config) {
907 pd->clks_per_count = config->clks_per_count;
908 ret = config->setup(pd);
909 } else {
910 ret = sh_mobile_i2c_init(pd);
911 }
912
913 pm_runtime_put_sync(&dev->dev);
914 if (ret)
915 return ret;
916
917 /* Init DMA */
918 sg_init_table(&pd->sg, 1);
919 pd->dma_direction = DMA_NONE;
920 pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
921
922 /* setup the private data */
923 adap = &pd->adap;
924 i2c_set_adapdata(adap, pd);
925
926 adap->owner = THIS_MODULE;
927 adap->algo = &sh_mobile_i2c_algorithm;
928 adap->quirks = &sh_mobile_i2c_quirks;
929 adap->dev.parent = &dev->dev;
930 adap->retries = 5;
931 adap->nr = dev->id;
932 adap->dev.of_node = dev->dev.of_node;
933
934 strscpy(adap->name, dev->name, sizeof(adap->name));
935
936 spin_lock_init(&pd->lock);
937 init_waitqueue_head(&pd->wait);
938
939 ret = i2c_add_numbered_adapter(adap);
940 if (ret < 0) {
941 sh_mobile_i2c_release_dma(pd);
942 return ret;
943 }
944
945 dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);
946
947 return 0;
948 }
949
/* Unregister the adapter and undo probe-time setup. */
static void sh_mobile_i2c_remove(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

	i2c_del_adapter(&pd->adap);
	sh_mobile_i2c_release_dma(pd);
	pm_runtime_disable(&dev->dev);
}
958
/* System suspend: reject further transfers until resume. */
static int sh_mobile_i2c_suspend(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_suspended(&pd->adap);
	return 0;
}
966
/* System resume: allow transfers again. */
static int sh_mobile_i2c_resume(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_resumed(&pd->adap);
	return 0;
}
974
/* noirq phase so the bus is blocked before clients may still use it */
static const struct dev_pm_ops sh_mobile_i2c_pm_ops = {
	NOIRQ_SYSTEM_SLEEP_PM_OPS(sh_mobile_i2c_suspend,
				  sh_mobile_i2c_resume)
};

static struct platform_driver sh_mobile_i2c_driver = {
	.driver		= {
		.name		= "i2c-sh_mobile",
		.of_match_table = sh_mobile_i2c_dt_ids,
		.pm		= pm_sleep_ptr(&sh_mobile_i2c_pm_ops),
	},
	.probe		= sh_mobile_i2c_probe,
	.remove		= sh_mobile_i2c_remove,
};
989
/* registered at subsys_initcall time so the bus is available early */
static int __init sh_mobile_i2c_adap_init(void)
{
	return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
	platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm");
MODULE_AUTHOR("Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");