1 /*
2  * DesignWare MIPI DSI Host Controller v1.02 driver
3  *
4  * Copyright (c) 2016 Linaro Limited.
5  * Copyright (c) 2014-2016 Hisilicon Limited.
6  *
7  * Author:
8  *	Xinliang Liu <z.liuxinliang@hisilicon.com>
9  *	Xinliang Liu <xinliang.liu@linaro.org>
10  *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
11  *
12  * This program is free software; you can redistribute it and/or modify
13  * it under the terms of the GNU General Public License version 2 as
14  * published by the Free Software Foundation.
15  *
16  */
17 
18 #include <linux/clk.h>
19 #include <linux/component.h>
20 #include <linux/of_graph.h>
21 
22 #include <drm/drm_of.h>
23 #include <drm/drm_crtc_helper.h>
24 #include <drm/drm_mipi_dsi.h>
25 #include <drm/drm_encoder_slave.h>
26 #include <drm/drm_atomic_helper.h>
27 
28 #include "dw_dsi_reg.h"
29 
30 #define MAX_TX_ESC_CLK		10
31 #define ROUND(x, y)		((x) / (y) + \
32 				((x) % (y) * 10 / (y) >= 5 ? 1 : 0))
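/*
 * ROUND() is integer division rounded to the nearest integer: the second
 * term adds 1 when the remainder is at least half of the divisor (judged
 * with integer arithmetic).  For example, ROUND(133, 16) = 8 (remainder 5,
 * 5 * 10 / 16 = 3 < 5), while ROUND(140, 16) = 9 (remainder 12,
 * 12 * 10 / 16 = 7 >= 5).
 */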
33 #define PHY_REF_CLK_RATE	19200000
34 #define PHY_REF_CLK_PERIOD_PS	(1000000000 / (PHY_REF_CLK_RATE / 1000))
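/*
 * With the 19.2 MHz reference clock this evaluates to
 * 1000000000 / 19200 = 52083 ps (about 52.08 ns) per reference clock cycle.
 */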
35 
36 #define encoder_to_dsi(encoder) \
37 	container_of(encoder, struct dw_dsi, encoder)
38 #define host_to_dsi(host) \
39 	container_of(host, struct dw_dsi, host)
40 
41 struct mipi_phy_params {
42 	u32 clk_t_lpx;
43 	u32 clk_t_hs_prepare;
44 	u32 clk_t_hs_zero;
45 	u32 clk_t_hs_trial;
46 	u32 clk_t_wakeup;
47 	u32 data_t_lpx;
48 	u32 data_t_hs_prepare;
49 	u32 data_t_hs_zero;
50 	u32 data_t_hs_trial;
51 	u32 data_t_ta_go;
52 	u32 data_t_ta_get;
53 	u32 data_t_wakeup;
54 	u32 hstx_ckg_sel;
55 	u32 pll_fbd_div5f;
56 	u32 pll_fbd_div1f;
57 	u32 pll_fbd_2p;
58 	u32 pll_enbwt;
59 	u32 pll_fbd_p;
60 	u32 pll_fbd_s;
61 	u32 pll_pre_div1p;
62 	u32 pll_pre_p;
63 	u32 pll_vco_750M;
64 	u32 pll_lpf_rs;
65 	u32 pll_lpf_cs;
66 	u32 clklp2hs_time;
67 	u32 clkhs2lp_time;
68 	u32 lp2hs_time;
69 	u32 hs2lp_time;
70 	u32 clk_to_data_delay;
71 	u32 data_to_clk_delay;
72 	u32 lane_byte_clk_kHz;
73 	u32 clk_division;
74 };
75 
76 struct dsi_hw_ctx {
77 	void __iomem *base;
78 	struct clk *pclk;
79 };
80 
81 struct dw_dsi {
82 	struct drm_encoder encoder;
83 	struct drm_bridge *bridge;
84 	struct mipi_dsi_host host;
85 	struct drm_display_mode cur_mode;
86 	struct dsi_hw_ctx *ctx;
87 	struct mipi_phy_params phy;
88 
89 	u32 lanes;
90 	enum mipi_dsi_pixel_format format;
91 	unsigned long mode_flags;
92 	bool enable;
93 };
94 
95 struct dsi_data {
96 	struct dw_dsi dsi;
97 	struct dsi_hw_ctx ctx;
98 };
99 
100 struct dsi_phy_range {
101 	u32 min_range_kHz;
102 	u32 max_range_kHz;
103 	u32 pll_vco_750M;
104 	u32 hstx_ckg_sel;
105 };
106 
107 static const struct dsi_phy_range dphy_range_info[] = {
108 	{   46875,    62500,   1,    7 },
109 	{   62500,    93750,   0,    7 },
110 	{   93750,   125000,   1,    6 },
111 	{  125000,   187500,   0,    6 },
112 	{  187500,   250000,   1,    5 },
113 	{  250000,   375000,   0,    5 },
114 	{  375000,   500000,   1,    4 },
115 	{  500000,   750000,   0,    4 },
116 	{  750000,  1000000,   1,    0 },
117 	{ 1000000,  1500000,   0,    0 }
118 };
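/*
 * Each entry covers a band of PHY output rates (kHz) and supplies the
 * pll_vco_750M and hstx_ckg_sel values (presumably the PLL VCO range
 * select and the HS TX clock generator setting) used by
 * dsi_calc_phy_rate().  For example, a request of 960000 kHz falls into
 * the 750000..1000000 row, so hstx_ckg_sel = 0 and q_pll keeps its
 * default value of 1.
 */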
119 
120 static u32 dsi_calc_phy_rate(u32 req_kHz, struct mipi_phy_params *phy)
121 {
122 	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
123 	u32 tmp_kHz = req_kHz;
124 	u32 i = 0;
125 	u32 q_pll = 1;
126 	u32 m_pll = 0;
127 	u32 n_pll = 0;
128 	u32 r_pll = 1;
129 	u32 m_n = 0;
130 	u32 m_n_int = 0;
131 	u32 f_kHz = 0;
132 	u64 temp;
133 
134 	/*
135 	 * Find a rate >= req_kHz.
136 	 */
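	/*
	 * Starting from the requested rate, work out the PLL dividers for
	 * the current candidate, recompute the rate those dividers actually
	 * produce, and if it still falls short of the request bump the
	 * candidate by 10 kHz and try again.
	 */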
137 	do {
138 		f_kHz = tmp_kHz;
139 
140 		for (i = 0; i < ARRAY_SIZE(dphy_range_info); i++)
141 			if (f_kHz >= dphy_range_info[i].min_range_kHz &&
142 			    f_kHz <= dphy_range_info[i].max_range_kHz)
143 				break;
144 
145 		if (i == ARRAY_SIZE(dphy_range_info)) {
146 			DRM_ERROR("%ukHz out of range\n", f_kHz);
147 			return 0;
148 		}
149 
150 		phy->pll_vco_750M = dphy_range_info[i].pll_vco_750M;
151 		phy->hstx_ckg_sel = dphy_range_info[i].hstx_ckg_sel;
152 
153 		if (phy->hstx_ckg_sel <= 7 &&
154 		    phy->hstx_ckg_sel >= 4)
155 			q_pll = 0x10 >> (7 - phy->hstx_ckg_sel);
156 
157 		temp = f_kHz * (u64)q_pll * (u64)ref_clk_ps;
158 		m_n_int = temp / (u64)1000000000;
159 		m_n = (temp % (u64)1000000000) / (u64)100000000;
160 
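		/*
		 * m_n_int is the integer part and m_n the tenths digit of
		 * f_kHz * q_pll * ref_clk_ps / 10^9, i.e. of the target
		 * feedback ratio m_pll / n_pll (compare the f_kHz
		 * recalculation at the bottom of the loop).  The branches
		 * below pick integer m_pll and n_pll approximating it.
		 */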
161 		if (m_n_int % 2 == 0) {
162 			if (m_n * 6 >= 50) {
163 				n_pll = 2;
164 				m_pll = (m_n_int + 1) * n_pll;
165 			} else if (m_n * 6 >= 30) {
166 				n_pll = 3;
167 				m_pll = m_n_int * n_pll + 2;
168 			} else {
169 				n_pll = 1;
170 				m_pll = m_n_int * n_pll;
171 			}
172 		} else {
173 			if (m_n * 6 >= 50) {
174 				n_pll = 1;
175 				m_pll = (m_n_int + 1) * n_pll;
176 			} else if (m_n * 6 >= 30) {
177 				n_pll = 1;
178 				m_pll = (m_n_int + 1) * n_pll;
179 			} else if (m_n * 6 >= 10) {
180 				n_pll = 3;
181 				m_pll = m_n_int * n_pll + 1;
182 			} else {
183 				n_pll = 2;
184 				m_pll = m_n_int * n_pll;
185 			}
186 		}
187 
188 		if (n_pll == 1) {
189 			phy->pll_fbd_p = 0;
190 			phy->pll_pre_div1p = 1;
191 		} else {
192 			phy->pll_fbd_p = n_pll;
193 			phy->pll_pre_div1p = 0;
194 		}
195 
196 		if (phy->pll_fbd_2p <= 7 && phy->pll_fbd_2p >= 4)
197 			r_pll = 0x10 >> (7 - phy->pll_fbd_2p);
198 
199 		if (m_pll == 2) {
200 			phy->pll_pre_p = 0;
201 			phy->pll_fbd_s = 0;
202 			phy->pll_fbd_div1f = 0;
203 			phy->pll_fbd_div5f = 1;
204 		} else if (m_pll >= 2 * 2 * r_pll && m_pll <= 2 * 4 * r_pll) {
205 			phy->pll_pre_p = m_pll / (2 * r_pll);
206 			phy->pll_fbd_s = 0;
207 			phy->pll_fbd_div1f = 1;
208 			phy->pll_fbd_div5f = 0;
209 		} else if (m_pll >= 2 * 5 * r_pll && m_pll <= 2 * 150 * r_pll) {
210 			if (((m_pll / (2 * r_pll)) % 2) == 0) {
211 				phy->pll_pre_p =
212 					(m_pll / (2 * r_pll)) / 2 - 1;
213 				phy->pll_fbd_s =
214 					(m_pll / (2 * r_pll)) % 2 + 2;
215 			} else {
216 				phy->pll_pre_p =
217 					(m_pll / (2 * r_pll)) / 2;
218 				phy->pll_fbd_s =
219 					(m_pll / (2 * r_pll)) % 2;
220 			}
221 			phy->pll_fbd_div1f = 0;
222 			phy->pll_fbd_div5f = 0;
223 		} else {
224 			phy->pll_pre_p = 0;
225 			phy->pll_fbd_s = 0;
226 			phy->pll_fbd_div1f = 0;
227 			phy->pll_fbd_div5f = 1;
228 		}
229 
230 		f_kHz = (u64)1000000000 * (u64)m_pll /
231 			((u64)ref_clk_ps * (u64)n_pll * (u64)q_pll);
232 
233 		if (f_kHz >= req_kHz)
234 			break;
235 
236 		tmp_kHz += 10;
237 
238 	} while (true);
239 
240 	return f_kHz;
241 }
242 
243 static void dsi_get_phy_params(u32 phy_req_kHz,
244 			       struct mipi_phy_params *phy)
245 {
246 	u32 ref_clk_ps = PHY_REF_CLK_PERIOD_PS;
247 	u32 phy_rate_kHz;
248 	u32 ui;
249 
250 	memset(phy, 0, sizeof(*phy));
251 
252 	phy_rate_kHz = dsi_calc_phy_rate(phy_req_kHz, phy);
253 	if (!phy_rate_kHz)
254 		return;
255 
256 	ui = 1000000 / phy_rate_kHz;
257 
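	/*
	 * ui is the high-speed unit interval (one bit time) in nanoseconds,
	 * 1000000 / rate-in-kHz; e.g. a hypothetical 500000 kHz (500 Mbit/s
	 * per lane) rate gives ui = 2 ns.  The constants below (50, 133,
	 * 262, ...) appear to be D-PHY timing requirements in nanoseconds,
	 * converted here into byte-clock (8 UI) ticks.
	 */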
258 	phy->clk_t_lpx = ROUND(50, 8 * ui);
259 	phy->clk_t_hs_prepare = ROUND(133, 16 * ui) - 1;
260 
261 	phy->clk_t_hs_zero = ROUND(262, 8 * ui);
262 	phy->clk_t_hs_trial = 2 * (ROUND(60, 8 * ui) - 1);
263 	phy->clk_t_wakeup = ROUND(1000000, (ref_clk_ps / 1000) - 1);
264 	if (phy->clk_t_wakeup > 0xff)
265 		phy->clk_t_wakeup = 0xff;
266 	phy->data_t_wakeup = phy->clk_t_wakeup;
267 	phy->data_t_lpx = phy->clk_t_lpx;
268 	phy->data_t_hs_prepare = ROUND(125 + 10 * ui, 16 * ui) - 1;
269 	phy->data_t_hs_zero = ROUND(105 + 6 * ui, 8 * ui);
270 	phy->data_t_hs_trial = 2 * (ROUND(60 + 4 * ui, 8 * ui) - 1);
271 	phy->data_t_ta_go = 3;
272 	phy->data_t_ta_get = 4;
273 
274 	phy->pll_enbwt = 1;
275 	phy->clklp2hs_time = ROUND(407, 8 * ui) + 12;
276 	phy->clkhs2lp_time = ROUND(105 + 12 * ui, 8 * ui);
277 	phy->lp2hs_time = ROUND(240 + 12 * ui, 8 * ui) + 1;
278 	phy->hs2lp_time = phy->clkhs2lp_time;
279 	phy->clk_to_data_delay = 1 + phy->clklp2hs_time;
280 	phy->data_to_clk_delay = ROUND(60 + 52 * ui, 8 * ui) +
281 				phy->clkhs2lp_time;
282 
283 	phy->lane_byte_clk_kHz = phy_rate_kHz / 8;
284 	phy->clk_division =
285 		DIV_ROUND_UP(phy->lane_byte_clk_kHz, MAX_TX_ESC_CLK);
286 }
287 
288 static u32 dsi_get_dpi_color_coding(enum mipi_dsi_pixel_format format)
289 {
290 	u32 val;
291 
292 	/*
293 	 * TODO: only RGB888 is supported for now; add more formats later.
294 	 */
295 	switch (format) {
296 	case MIPI_DSI_FMT_RGB888:
297 		val = DSI_24BITS_1;
298 		break;
299 	default:
300 		val = DSI_24BITS_1;
301 		break;
302 	}
303 
304 	return val;
305 }
306 
307 /*
308  * dsi phy reg write function
309  */
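/*
 * The D-PHY registers are not memory mapped directly; they are reached
 * through the DesignWare PHY test interface.  The register address (offset
 * by 0x10000, which presumably flags an address phase) is written to
 * PHY_TST_CTRL1 and latched by pulsing bit 1 of PHY_TST_CTRL0, then the
 * value is written and latched the same way.
 */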
310 static void dsi_phy_tst_set(void __iomem *base, u32 reg, u32 val)
311 {
312 	u32 reg_write = 0x10000 + reg;
313 
314 	/*
315 	 * latch reg first
316 	 */
317 	writel(reg_write, base + PHY_TST_CTRL1);
318 	writel(0x02, base + PHY_TST_CTRL0);
319 	writel(0x00, base + PHY_TST_CTRL0);
320 
321 	/*
322 	 * then latch value
323 	 */
324 	writel(val, base + PHY_TST_CTRL1);
325 	writel(0x02, base + PHY_TST_CTRL0);
326 	writel(0x00, base + PHY_TST_CTRL0);
327 }
328 
329 static void dsi_set_phy_timer(void __iomem *base,
330 			      struct mipi_phy_params *phy,
331 			      u32 lanes)
332 {
333 	u32 val;
334 
335 	/*
336 	 * Set lane value and phy stop wait time.
337 	 */
338 	val = (lanes - 1) | (PHY_STOP_WAIT_TIME << 8);
339 	writel(val, base + PHY_IF_CFG);
340 
341 	/*
342 	 * Set phy clk division.
343 	 */
344 	val = readl(base + CLKMGR_CFG) | phy->clk_division;
345 	writel(val, base + CLKMGR_CFG);
346 
347 	/*
348 	 * Set lp and hs switching params.
349 	 */
350 	dw_update_bits(base + PHY_TMR_CFG, 24, MASK(8), phy->hs2lp_time);
351 	dw_update_bits(base + PHY_TMR_CFG, 16, MASK(8), phy->lp2hs_time);
352 	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 16, MASK(10),
353 		       phy->clkhs2lp_time);
354 	dw_update_bits(base + PHY_TMR_LPCLK_CFG, 0, MASK(10),
355 		       phy->clklp2hs_time);
356 	dw_update_bits(base + CLK_DATA_TMR_CFG, 8, MASK(8),
357 		       phy->data_to_clk_delay);
358 	dw_update_bits(base + CLK_DATA_TMR_CFG, 0, MASK(8),
359 		       phy->clk_to_data_delay);
360 }
361 
362 static void dsi_set_mipi_phy(void __iomem *base,
363 			     struct mipi_phy_params *phy,
364 			     u32 lanes)
365 {
366 	u32 delay_count;
367 	u32 val;
368 	u32 i;
369 
370 	/* phy timer setting */
371 	dsi_set_phy_timer(base, phy, lanes);
372 
373 	/*
374 	 * Reset to clean up phy tst params.
375 	 */
376 	writel(0, base + PHY_RSTZ);
377 	writel(0, base + PHY_TST_CTRL0);
378 	writel(1, base + PHY_TST_CTRL0);
379 	writel(0, base + PHY_TST_CTRL0);
380 
381 	/*
382 	 * Clock lane timing control setting: TLPX, THS-PREPARE,
383 	 * THS-ZERO, THS-TRAIL, TWAKEUP.
384 	 */
385 	dsi_phy_tst_set(base, CLK_TLPX, phy->clk_t_lpx);
386 	dsi_phy_tst_set(base, CLK_THS_PREPARE, phy->clk_t_hs_prepare);
387 	dsi_phy_tst_set(base, CLK_THS_ZERO, phy->clk_t_hs_zero);
388 	dsi_phy_tst_set(base, CLK_THS_TRAIL, phy->clk_t_hs_trial);
389 	dsi_phy_tst_set(base, CLK_TWAKEUP, phy->clk_t_wakeup);
390 
391 	/*
392 	 * Data lane timing control setting: TLPX, THS-PREPARE,
393 	 * THS-ZERO, THS-TRAIL, TTA-GO, TTA-GET, TWAKEUP.
394 	 */
395 	for (i = 0; i < lanes; i++) {
396 		dsi_phy_tst_set(base, DATA_TLPX(i), phy->data_t_lpx);
397 		dsi_phy_tst_set(base, DATA_THS_PREPARE(i),
398 				phy->data_t_hs_prepare);
399 		dsi_phy_tst_set(base, DATA_THS_ZERO(i), phy->data_t_hs_zero);
400 		dsi_phy_tst_set(base, DATA_THS_TRAIL(i), phy->data_t_hs_trial);
401 		dsi_phy_tst_set(base, DATA_TTA_GO(i), phy->data_t_ta_go);
402 		dsi_phy_tst_set(base, DATA_TTA_GET(i), phy->data_t_ta_get);
403 		dsi_phy_tst_set(base, DATA_TWAKEUP(i), phy->data_t_wakeup);
404 	}
405 
406 	/*
407 	 * physical configuration: I, pll I, pll II, pll III,
408 	 * pll IV, pll V.
409 	 */
410 	dsi_phy_tst_set(base, PHY_CFG_I, phy->hstx_ckg_sel);
411 	val = (phy->pll_fbd_div5f << 5) + (phy->pll_fbd_div1f << 4) +
412 				(phy->pll_fbd_2p << 1) + phy->pll_enbwt;
413 	dsi_phy_tst_set(base, PHY_CFG_PLL_I, val);
414 	dsi_phy_tst_set(base, PHY_CFG_PLL_II, phy->pll_fbd_p);
415 	dsi_phy_tst_set(base, PHY_CFG_PLL_III, phy->pll_fbd_s);
416 	val = (phy->pll_pre_div1p << 7) + phy->pll_pre_p;
417 	dsi_phy_tst_set(base, PHY_CFG_PLL_IV, val);
418 	val = (5 << 5) + (phy->pll_vco_750M << 4) + (phy->pll_lpf_rs << 2) +
419 		phy->pll_lpf_cs;
420 	dsi_phy_tst_set(base, PHY_CFG_PLL_V, val);
421 
422 	writel(PHY_ENABLECLK, base + PHY_RSTZ);
423 	udelay(1);
424 	writel(PHY_ENABLECLK | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
425 	udelay(1);
426 	writel(PHY_ENABLECLK | PHY_UNRSTZ | PHY_UNSHUTDOWNZ, base + PHY_RSTZ);
427 	usleep_range(1000, 1500);
428 
429 	/*
430 	 * wait for phy's clock ready
431 	 */
432 	delay_count = 100;
433 	while (delay_count) {
434 		val = readl(base + PHY_STATUS);
435 		if ((BIT(0) | BIT(2)) & val)
436 			break;
437 
438 		udelay(1);
439 		delay_count--;
440 	}
441 
442 	if (!delay_count)
443 		DRM_INFO("phylock and phystopstateclklane are not ready.\n");
444 }
445 
446 static void dsi_set_mode_timing(void __iomem *base,
447 				u32 lane_byte_clk_kHz,
448 				struct drm_display_mode *mode,
449 				enum mipi_dsi_pixel_format format)
450 {
451 	u32 hfp, hbp, hsw, vfp, vbp, vsw;
452 	u32 hline_time;
453 	u32 hsa_time;
454 	u32 hbp_time;
455 	u32 pixel_clk_kHz;
456 	int htot, vtot;
457 	u32 val;
458 	u64 tmp;
459 
460 	val = dsi_get_dpi_color_coding(format);
461 	writel(val, base + DPI_COLOR_CODING);
462 
463 	val = (mode->flags & DRM_MODE_FLAG_NHSYNC ? 1 : 0) << 2;
464 	val |= (mode->flags & DRM_MODE_FLAG_NVSYNC ? 1 : 0) << 1;
465 	writel(val, base + DPI_CFG_POL);
466 
467 	/*
468 	 * The DSI IP accepts vertical timing using lines as normal,
469 	 * but horizontal timing is a mixture of pixel-clocks for the
470 	 * active region and byte-lane clocks for the blanking-related
471 	 * timings.  hfp is specified as the total hline_time in byte-
472 	 * lane clocks minus hsa, hbp and active.
473 	 */
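	/*
	 * Illustration with hypothetical numbers (ignoring the small
	 * round-up done by dsi_calc_phy_rate()): a 1080p mode with a
	 * 148500 kHz pixel clock and htotal = 2200, at 24 bpp over 4 lanes,
	 * has a lane byte clock of 148500 * 24 / 4 / 8 = 111375 kHz, so
	 * hline_time = DIV_ROUND_UP(2200 * 111375, 148500) = 1650.
	 */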
474 	pixel_clk_kHz = mode->clock;
475 	htot = mode->htotal;
476 	vtot = mode->vtotal;
477 	hfp = mode->hsync_start - mode->hdisplay;
478 	hbp = mode->htotal - mode->hsync_end;
479 	hsw = mode->hsync_end - mode->hsync_start;
480 	vfp = mode->vsync_start - mode->vdisplay;
481 	vbp = mode->vtotal - mode->vsync_end;
482 	vsw = mode->vsync_end - mode->vsync_start;
483 	if (vsw > 15) {
484 		DRM_DEBUG_DRIVER("vsw exceeded 15\n");
485 		vsw = 15;
486 	}
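	/* the VSA register field is presumably only 4 bits wide, hence the clamp */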
487 
488 	hsa_time = (hsw * lane_byte_clk_kHz) / pixel_clk_kHz;
489 	hbp_time = (hbp * lane_byte_clk_kHz) / pixel_clk_kHz;
490 	tmp = (u64)htot * (u64)lane_byte_clk_kHz;
491 	hline_time = DIV_ROUND_UP(tmp, pixel_clk_kHz);
492 
493 	/* all specified in byte-lane clocks */
494 	writel(hsa_time, base + VID_HSA_TIME);
495 	writel(hbp_time, base + VID_HBP_TIME);
496 	writel(hline_time, base + VID_HLINE_TIME);
497 
498 	writel(vsw, base + VID_VSA_LINES);
499 	writel(vbp, base + VID_VBP_LINES);
500 	writel(vfp, base + VID_VFP_LINES);
501 	writel(mode->vdisplay, base + VID_VACTIVE_LINES);
502 	writel(mode->hdisplay, base + VID_PKT_SIZE);
503 
504 	DRM_DEBUG_DRIVER("htot=%d, hfp=%d, hbp=%d, hsw=%d\n",
505 			 htot, hfp, hbp, hsw);
506 	DRM_DEBUG_DRIVER("vtot=%d, vfp=%d, vbp=%d, vsw=%d\n",
507 			 vtot, vfp, vbp, vsw);
508 	DRM_DEBUG_DRIVER("hsa_time=%d, hbp_time=%d, hline_time=%d\n",
509 			 hsa_time, hbp_time, hline_time);
510 }
511 
512 static void dsi_set_video_mode(void __iomem *base, unsigned long flags)
513 {
514 	u32 val;
515 	u32 mode_mask = MIPI_DSI_MODE_VIDEO | MIPI_DSI_MODE_VIDEO_BURST |
516 		MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
517 	u32 non_burst_sync_pulse = MIPI_DSI_MODE_VIDEO |
518 		MIPI_DSI_MODE_VIDEO_SYNC_PULSE;
519 	u32 non_burst_sync_event = MIPI_DSI_MODE_VIDEO;
520 
521 	/*
522 	 * Choose the video mode type; burst is used unless non-burst was requested.
523 	 */
524 	if ((flags & mode_mask) == non_burst_sync_pulse)
525 		val = DSI_NON_BURST_SYNC_PULSES;
526 	else if ((flags & mode_mask) == non_burst_sync_event)
527 		val = DSI_NON_BURST_SYNC_EVENTS;
528 	else
529 		val = DSI_BURST_SYNC_PULSES_1;
530 	writel(val, base + VID_MODE_CFG);
531 
532 	writel(PHY_TXREQUESTCLKHS, base + LPCLK_CTRL);
533 	writel(DSI_VIDEO_MODE, base + MODE_CFG);
534 }
535 
536 static void dsi_mipi_init(struct dw_dsi *dsi)
537 {
538 	struct dsi_hw_ctx *ctx = dsi->ctx;
539 	struct mipi_phy_params *phy = &dsi->phy;
540 	struct drm_display_mode *mode = &dsi->cur_mode;
541 	u32 bpp = mipi_dsi_pixel_format_to_bpp(dsi->format);
542 	void __iomem *base = ctx->base;
543 	u32 dphy_req_kHz;
544 
545 	/*
546 	 * Compute D-PHY parameters for the required per-lane bit rate.
547 	 */
548 	dphy_req_kHz = mode->clock * bpp / dsi->lanes;
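	/*
	 * Hypothetical example: a 148500 kHz pixel clock at 24 bpp over
	 * 4 lanes requests 148500 * 24 / 4 = 891000 kHz, i.e. 891 Mbit/s
	 * per lane.
	 */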
549 	dsi_get_phy_params(dphy_req_kHz, phy);
550 
551 	/* reset Core */
552 	writel(RESET, base + PWR_UP);
553 
554 	/* set dsi phy params */
555 	dsi_set_mipi_phy(base, phy, dsi->lanes);
556 
557 	/* set dsi mode timing */
558 	dsi_set_mode_timing(base, phy->lane_byte_clk_kHz, mode, dsi->format);
559 
560 	/* set dsi video mode */
561 	dsi_set_video_mode(base, dsi->mode_flags);
562 
563 	/* dsi wake up */
564 	writel(POWERUP, base + PWR_UP);
565 
566 	DRM_DEBUG_DRIVER("lanes=%d, pixel_clk=%d kHz, bytes_freq=%d kHz\n",
567 			 dsi->lanes, mode->clock, phy->lane_byte_clk_kHz);
568 }
569 
570 static void dsi_encoder_disable(struct drm_encoder *encoder)
571 {
572 	struct dw_dsi *dsi = encoder_to_dsi(encoder);
573 	struct dsi_hw_ctx *ctx = dsi->ctx;
574 	void __iomem *base = ctx->base;
575 
576 	if (!dsi->enable)
577 		return;
578 
579 	writel(0, base + PWR_UP);
580 	writel(0, base + LPCLK_CTRL);
581 	writel(0, base + PHY_RSTZ);
582 	clk_disable_unprepare(ctx->pclk);
583 
584 	dsi->enable = false;
585 }
586 
587 static void dsi_encoder_enable(struct drm_encoder *encoder)
588 {
589 	struct dw_dsi *dsi = encoder_to_dsi(encoder);
590 	struct dsi_hw_ctx *ctx = dsi->ctx;
591 	int ret;
592 
593 	if (dsi->enable)
594 		return;
595 
596 	ret = clk_prepare_enable(ctx->pclk);
597 	if (ret) {
598 		DRM_ERROR("fail to enable pclk: %d\n", ret);
599 		return;
600 	}
601 
602 	dsi_mipi_init(dsi);
603 
604 	dsi->enable = true;
605 }
606 
607 static void dsi_encoder_mode_set(struct drm_encoder *encoder,
608 				 struct drm_display_mode *mode,
609 				 struct drm_display_mode *adj_mode)
610 {
611 	struct dw_dsi *dsi = encoder_to_dsi(encoder);
612 
613 	drm_mode_copy(&dsi->cur_mode, adj_mode);
614 }
615 
616 static int dsi_encoder_atomic_check(struct drm_encoder *encoder,
617 				    struct drm_crtc_state *crtc_state,
618 				    struct drm_connector_state *conn_state)
619 {
620 	/* do nothing */
621 	return 0;
622 }
623 
624 static const struct drm_encoder_helper_funcs dw_encoder_helper_funcs = {
625 	.atomic_check	= dsi_encoder_atomic_check,
626 	.mode_set	= dsi_encoder_mode_set,
627 	.enable		= dsi_encoder_enable,
628 	.disable	= dsi_encoder_disable
629 };
630 
631 static const struct drm_encoder_funcs dw_encoder_funcs = {
632 	.destroy = drm_encoder_cleanup,
633 };
634 
635 static int dw_drm_encoder_init(struct device *dev,
636 			       struct drm_device *drm_dev,
637 			       struct drm_encoder *encoder)
638 {
639 	int ret;
640 	u32 crtc_mask = drm_of_find_possible_crtcs(drm_dev, dev->of_node);
641 
642 	if (!crtc_mask) {
643 		DRM_ERROR("failed to find crtc mask\n");
644 		return -EINVAL;
645 	}
646 
647 	encoder->possible_crtcs = crtc_mask;
648 	ret = drm_encoder_init(drm_dev, encoder, &dw_encoder_funcs,
649 			       DRM_MODE_ENCODER_DSI, NULL);
650 	if (ret) {
651 		DRM_ERROR("failed to init dsi encoder\n");
652 		return ret;
653 	}
654 
655 	drm_encoder_helper_add(encoder, &dw_encoder_helper_funcs);
656 
657 	return 0;
658 }
659 
660 static int dsi_host_attach(struct mipi_dsi_host *host,
661 			   struct mipi_dsi_device *mdsi)
662 {
663 	struct dw_dsi *dsi = host_to_dsi(host);
664 
665 	if (mdsi->lanes < 1 || mdsi->lanes > 4) {
666 		DRM_ERROR("dsi device params invalid\n");
667 		return -EINVAL;
668 	}
669 
670 	dsi->lanes = mdsi->lanes;
671 	dsi->format = mdsi->format;
672 	dsi->mode_flags = mdsi->mode_flags;
673 
674 	return 0;
675 }
676 
677 static int dsi_host_detach(struct mipi_dsi_host *host,
678 			   struct mipi_dsi_device *mdsi)
679 {
680 	/* do nothing */
681 	return 0;
682 }
683 
684 static const struct mipi_dsi_host_ops dsi_host_ops = {
685 	.attach = dsi_host_attach,
686 	.detach = dsi_host_detach,
687 };
688 
689 static int dsi_host_init(struct device *dev, struct dw_dsi *dsi)
690 {
691 	struct mipi_dsi_host *host = &dsi->host;
692 	int ret;
693 
694 	host->dev = dev;
695 	host->ops = &dsi_host_ops;
696 	ret = mipi_dsi_host_register(host);
697 	if (ret) {
698 		DRM_ERROR("failed to register dsi host\n");
699 		return ret;
700 	}
701 
702 	return 0;
703 }
704 
705 static int dsi_bridge_init(struct drm_device *dev, struct dw_dsi *dsi)
706 {
707 	struct drm_encoder *encoder = &dsi->encoder;
708 	struct drm_bridge *bridge = dsi->bridge;
709 	int ret;
710 
711 	/* associate the bridge to dsi encoder */
712 	ret = drm_bridge_attach(encoder, bridge, NULL);
713 	if (ret) {
714 		DRM_ERROR("failed to attach external bridge\n");
715 		return ret;
716 	}
717 
718 	return 0;
719 }
720 
721 static int dsi_bind(struct device *dev, struct device *master, void *data)
722 {
723 	struct dsi_data *ddata = dev_get_drvdata(dev);
724 	struct dw_dsi *dsi = &ddata->dsi;
725 	struct drm_device *drm_dev = data;
726 	int ret;
727 
728 	ret = dw_drm_encoder_init(dev, drm_dev, &dsi->encoder);
729 	if (ret)
730 		return ret;
731 
732 	ret = dsi_host_init(dev, dsi);
733 	if (ret)
734 		return ret;
735 
736 	ret = dsi_bridge_init(drm_dev, dsi);
737 	if (ret)
738 		return ret;
739 
740 	return 0;
741 }
742 
743 static void dsi_unbind(struct device *dev, struct device *master, void *data)
744 {
745 	/* do nothing */
746 }
747 
748 static const struct component_ops dsi_ops = {
749 	.bind	= dsi_bind,
750 	.unbind	= dsi_unbind,
751 };
752 
753 static int dsi_parse_dt(struct platform_device *pdev, struct dw_dsi *dsi)
754 {
755 	struct dsi_hw_ctx *ctx = dsi->ctx;
756 	struct device_node *np = pdev->dev.of_node;
757 	struct device_node *endpoint, *bridge_node;
758 	struct drm_bridge *bridge;
759 	struct resource *res;
760 
761 	/*
762 	 * Get the endpoint node. In our case, the DSI host has one output
763 	 * port (port 1) to which the external HDMI bridge is connected.
764 	 */
765 	endpoint = of_graph_get_endpoint_by_regs(np, 1, -1);
766 	if (!endpoint) {
767 		DRM_ERROR("no valid endpoint node\n");
768 		return -ENODEV;
769 	}
770 
771 	/* resolve the remote bridge node before dropping the endpoint ref */
772 	bridge_node = of_graph_get_remote_port_parent(endpoint);
773 	of_node_put(endpoint);
774 	if (!bridge_node) {
775 		DRM_ERROR("no valid bridge node\n");
776 		return -ENODEV;
777 	}
778 	bridge = of_drm_find_bridge(bridge_node);
779 	of_node_put(bridge_node);
780 	if (!bridge) {
781 		DRM_INFO("wait for external HDMI bridge driver.\n");
782 		return -EPROBE_DEFER;
783 	}
784 	dsi->bridge = bridge;
785 
786 	ctx->pclk = devm_clk_get(&pdev->dev, "pclk");
787 	if (IS_ERR(ctx->pclk)) {
788 		DRM_ERROR("failed to get pclk clock\n");
789 		return PTR_ERR(ctx->pclk);
790 	}
791 
792 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
793 	ctx->base = devm_ioremap_resource(&pdev->dev, res);
794 	if (IS_ERR(ctx->base)) {
795 		DRM_ERROR("failed to remap dsi io region\n");
796 		return PTR_ERR(ctx->base);
797 	}
798 
799 	return 0;
800 }
801 
802 static int dsi_probe(struct platform_device *pdev)
803 {
804 	struct dsi_data *data;
805 	struct dw_dsi *dsi;
806 	struct dsi_hw_ctx *ctx;
807 	int ret;
808 
809 	data = devm_kzalloc(&pdev->dev, sizeof(*data), GFP_KERNEL);
810 	if (!data) {
811 		DRM_ERROR("failed to allocate dsi data.\n");
812 		return -ENOMEM;
813 	}
814 	dsi = &data->dsi;
815 	ctx = &data->ctx;
816 	dsi->ctx = ctx;
817 
818 	ret = dsi_parse_dt(pdev, dsi);
819 	if (ret)
820 		return ret;
821 
822 	platform_set_drvdata(pdev, data);
823 
824 	return component_add(&pdev->dev, &dsi_ops);
825 }
826 
827 static int dsi_remove(struct platform_device *pdev)
828 {
829 	component_del(&pdev->dev, &dsi_ops);
830 
831 	return 0;
832 }
833 
834 static const struct of_device_id dsi_of_match[] = {
835 	{.compatible = "hisilicon,hi6220-dsi"},
836 	{ }
837 };
838 MODULE_DEVICE_TABLE(of, dsi_of_match);
839 
840 static struct platform_driver dsi_driver = {
841 	.probe = dsi_probe,
842 	.remove = dsi_remove,
843 	.driver = {
844 		.name = "dw-dsi",
845 		.of_match_table = dsi_of_match,
846 	},
847 };
848 
849 module_platform_driver(dsi_driver);
850 
851 MODULE_AUTHOR("Xinliang Liu <xinliang.liu@linaro.org>");
852 MODULE_AUTHOR("Xinliang Liu <z.liuxinliang@hisilicon.com>");
853 MODULE_AUTHOR("Xinwei Kong <kong.kongxinwei@hisilicon.com>");
854 MODULE_DESCRIPTION("DesignWare MIPI DSI Host Controller v1.02 driver");
855 MODULE_LICENSE("GPL v2");
856