xref: /linux/drivers/gpu/drm/msm/dp/dp_display.c (revision 90d32e92011eaae8e70a9169b4e7acf4ca8f9d3a)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #include <linux/module.h>
7 #include <linux/slab.h>
8 #include <linux/uaccess.h>
9 #include <linux/debugfs.h>
10 #include <linux/component.h>
11 #include <linux/of_irq.h>
12 #include <linux/phy/phy.h>
13 #include <linux/delay.h>
14 #include <drm/display/drm_dp_aux_bus.h>
15 #include <drm/drm_edid.h>
16 
17 #include "msm_drv.h"
18 #include "msm_kms.h"
19 #include "dp_ctrl.h"
20 #include "dp_catalog.h"
21 #include "dp_aux.h"
22 #include "dp_reg.h"
23 #include "dp_link.h"
24 #include "dp_panel.h"
25 #include "dp_display.h"
26 #include "dp_drm.h"
27 #include "dp_audio.h"
28 #include "dp_debug.h"
29 
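/*
 * psr_enabled is a boot-time module parameter (perm 0, so it is not exposed
 * in sysfs). With this file built into the msm DRM module it would typically
 * be set as e.g. "msm.psr_enabled=1" on the kernel command line; the exact
 * parameter prefix depends on which module this object ends up linked into.
 */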
30 static bool psr_enabled = false;
31 module_param(psr_enabled, bool, 0);
32 MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays");
33 
34 #define HPD_STRING_SIZE 30
35 
36 enum {
37 	ISR_DISCONNECTED,
38 	ISR_CONNECT_PENDING,
39 	ISR_CONNECTED,
40 	ISR_HPD_REPLUG_COUNT,
41 	ISR_IRQ_HPD_PULSE_COUNT,
42 	ISR_HPD_LO_GLITCH_COUNT,
43 };
44 
45 /* event thread connection state */
46 enum {
47 	ST_DISCONNECTED,
48 	ST_MAINLINK_READY,
49 	ST_CONNECTED,
50 	ST_DISCONNECT_PENDING,
51 	ST_DISPLAY_OFF,
52 };
53 
54 enum {
55 	EV_NO_EVENT,
56 	/* hpd events */
57 	EV_HPD_PLUG_INT,
58 	EV_IRQ_HPD_INT,
59 	EV_HPD_UNPLUG_INT,
60 	EV_USER_NOTIFICATION,
61 };
62 
63 #define EVENT_TIMEOUT	(HZ/10)	/* 100ms */
64 #define DP_EVENT_Q_MAX	8
65 
66 #define DP_TIMEOUT_NONE		0
67 
68 #define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2)
69 
70 struct dp_event {
71 	u32 event_id;
72 	u32 data;
73 	u32 delay;
74 };
75 
76 struct dp_display_private {
77 	int irq;
78 
79 	unsigned int id;
80 
81 	/* state variables */
82 	bool core_initialized;
83 	bool phy_initialized;
84 	bool audio_supported;
85 
86 	struct drm_device *drm_dev;
87 
88 	struct dp_catalog *catalog;
89 	struct drm_dp_aux *aux;
90 	struct dp_link    *link;
91 	struct dp_panel   *panel;
92 	struct dp_ctrl    *ctrl;
93 
94 	struct dp_display_mode dp_mode;
95 	struct msm_dp dp_display;
96 
97 	/* wait for audio signaling */
98 	struct completion audio_comp;
99 
100 	/* event-related state, only accessed by the event thread */
101 	struct mutex event_mutex;
102 	wait_queue_head_t event_q;
103 	u32 hpd_state;
104 	u32 event_pndx;
105 	u32 event_gndx;
106 	struct task_struct *ev_tsk;
107 	struct dp_event event_list[DP_EVENT_Q_MAX];
108 	spinlock_t event_lock;
109 
110 	bool wide_bus_supported;
111 
112 	struct dp_audio *audio;
113 };
114 
115 struct msm_dp_desc {
116 	phys_addr_t io_start;
117 	unsigned int id;
118 	bool wide_bus_supported;
119 };
120 
121 static const struct msm_dp_desc sc7180_dp_descs[] = {
122 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0 },
123 	{}
124 };
125 
126 static const struct msm_dp_desc sc7280_dp_descs[] = {
127 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
128 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
129 	{}
130 };
131 
132 static const struct msm_dp_desc sc8180x_dp_descs[] = {
133 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0 },
134 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1 },
135 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2 },
136 	{}
137 };
138 
139 static const struct msm_dp_desc sc8280xp_dp_descs[] = {
140 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
141 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
142 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
143 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
144 	{ .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
145 	{ .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
146 	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
147 	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
148 	{}
149 };
150 
151 static const struct msm_dp_desc sm8650_dp_descs[] = {
152 	{ .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0 },
153 	{}
154 };
155 
156 static const struct msm_dp_desc x1e80100_dp_descs[] = {
157 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
158 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
159 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
160 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
161 	{}
162 };
163 
164 static const struct of_device_id dp_dt_match[] = {
165 	{ .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_descs },
166 	{ .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_descs },
167 	{ .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_descs },
168 	{ .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_descs },
169 	{ .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_descs },
170 	{ .compatible = "qcom,sc8280xp-dp", .data = &sc8280xp_dp_descs },
171 	{ .compatible = "qcom,sc8280xp-edp", .data = &sc8280xp_dp_descs },
172 	{ .compatible = "qcom,sdm845-dp", .data = &sc7180_dp_descs },
173 	{ .compatible = "qcom,sm8350-dp", .data = &sc7180_dp_descs },
174 	{ .compatible = "qcom,sm8650-dp", .data = &sm8650_dp_descs },
175 	{ .compatible = "qcom,x1e80100-dp", .data = &x1e80100_dp_descs },
176 	{}
177 };
178 
179 static struct dp_display_private *dev_get_dp_display_private(struct device *dev)
180 {
181 	struct msm_dp *dp = dev_get_drvdata(dev);
182 
183 	return container_of(dp, struct dp_display_private, dp_display);
184 }
185 
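/*
 * The HPD event queue is a small ring buffer protected by event_lock:
 * event_pndx is the producer index (advanced here, typically from the HPD
 * interrupt handler) and event_gndx is the consumer index (advanced by
 * hpd_event_thread()). The queue counts as full when advancing the producer
 * index would make it equal to the consumer index, so at most
 * DP_EVENT_Q_MAX - 1 events can be pending at once.
 */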
186 static int dp_add_event(struct dp_display_private *dp_priv, u32 event,
187 						u32 data, u32 delay)
188 {
189 	unsigned long flag;
190 	struct dp_event *todo;
191 	int pndx;
192 
193 	spin_lock_irqsave(&dp_priv->event_lock, flag);
194 	pndx = dp_priv->event_pndx + 1;
195 	pndx %= DP_EVENT_Q_MAX;
196 	if (pndx == dp_priv->event_gndx) {
197 		pr_err("event_q is full: pndx=%d gndx=%d\n",
198 			dp_priv->event_pndx, dp_priv->event_gndx);
199 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
200 		return -EPERM;
201 	}
202 	todo = &dp_priv->event_list[dp_priv->event_pndx++];
203 	dp_priv->event_pndx %= DP_EVENT_Q_MAX;
204 	todo->event_id = event;
205 	todo->data = data;
206 	todo->delay = delay;
207 	wake_up(&dp_priv->event_q);
208 	spin_unlock_irqrestore(&dp_priv->event_lock, flag);
209 
210 	return 0;
211 }
212 
213 static int dp_del_event(struct dp_display_private *dp_priv, u32 event)
214 {
215 	unsigned long flag;
216 	struct dp_event *todo;
217 	u32	gndx;
218 
219 	spin_lock_irqsave(&dp_priv->event_lock, flag);
220 	if (dp_priv->event_pndx == dp_priv->event_gndx) {
221 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
222 		return -ENOENT;
223 	}
224 
225 	gndx = dp_priv->event_gndx;
226 	while (dp_priv->event_pndx != gndx) {
227 		todo = &dp_priv->event_list[gndx];
228 		if (todo->event_id == event) {
229 			todo->event_id = EV_NO_EVENT;	/* deleted */
230 			todo->delay = 0;
231 		}
232 		gndx++;
233 		gndx %= DP_EVENT_Q_MAX;
234 	}
235 	spin_unlock_irqrestore(&dp_priv->event_lock, flag);
236 
237 	return 0;
238 }
239 
240 void dp_display_signal_audio_start(struct msm_dp *dp_display)
241 {
242 	struct dp_display_private *dp;
243 
244 	dp = container_of(dp_display, struct dp_display_private, dp_display);
245 
246 	reinit_completion(&dp->audio_comp);
247 }
248 
249 void dp_display_signal_audio_complete(struct msm_dp *dp_display)
250 {
251 	struct dp_display_private *dp;
252 
253 	dp = container_of(dp_display, struct dp_display_private, dp_display);
254 
255 	complete_all(&dp->audio_comp);
256 }
257 
258 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv);
259 
260 static int dp_display_bind(struct device *dev, struct device *master,
261 			   void *data)
262 {
263 	int rc = 0;
264 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
265 	struct msm_drm_private *priv = dev_get_drvdata(master);
266 	struct drm_device *drm = priv->dev;
267 
268 	dp->dp_display.drm_dev = drm;
269 	priv->dp[dp->id] = &dp->dp_display;
270 
273 	dp->drm_dev = drm;
274 	dp->aux->drm_dev = drm;
275 	rc = dp_aux_register(dp->aux);
276 	if (rc) {
277 		DRM_ERROR("DRM DP AUX register failed\n");
278 		goto end;
279 	}
280 
282 	rc = dp_register_audio_driver(dev, dp->audio);
283 	if (rc) {
284 		DRM_ERROR("DP audio driver registration failed\n");
285 		goto end;
286 	}
287 
288 	rc = dp_hpd_event_thread_start(dp);
289 	if (rc) {
290 		DRM_ERROR("Event thread create failed\n");
291 		goto end;
292 	}
293 
294 	return 0;
295 end:
296 	return rc;
297 }
298 
299 static void dp_display_unbind(struct device *dev, struct device *master,
300 			      void *data)
301 {
302 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
303 	struct msm_drm_private *priv = dev_get_drvdata(master);
304 
305 	kthread_stop(dp->ev_tsk);
306 
307 	of_dp_aux_depopulate_bus(dp->aux);
308 
309 	dp_unregister_audio_driver(dev, dp->audio);
310 	dp_aux_unregister(dp->aux);
311 	dp->drm_dev = NULL;
312 	dp->aux->drm_dev = NULL;
313 	priv->dp[dp->id] = NULL;
314 }
315 
316 static const struct component_ops dp_display_comp_ops = {
317 	.bind = dp_display_bind,
318 	.unbind = dp_display_unbind,
319 };
320 
321 static void dp_display_send_hpd_event(struct msm_dp *dp_display)
322 {
323 	struct dp_display_private *dp;
324 	struct drm_connector *connector;
325 
326 	dp = container_of(dp_display, struct dp_display_private, dp_display);
327 
328 	connector = dp->dp_display.connector;
329 	drm_helper_hpd_irq_event(connector->dev);
330 }
331 
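/*
 * EV_USER_NOTIFICATION events end up here: "hpd" carries the new link_ready
 * state, and userspace is notified through a DRM HPD uevent, which in turn
 * triggers the modeset that completes the connect or disconnect sequence.
 */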
332 static int dp_display_send_hpd_notification(struct dp_display_private *dp,
333 					    bool hpd)
334 {
335 	if ((hpd && dp->dp_display.link_ready) ||
336 			(!hpd && !dp->dp_display.link_ready)) {
337 		drm_dbg_dp(dp->drm_dev, "HPD already %s\n",
338 				(hpd ? "on" : "off"));
339 		return 0;
340 	}
341 
342 	/* reset video pattern flag on disconnect */
343 	if (!hpd) {
344 		dp->panel->video_test = false;
345 		if (!dp->dp_display.is_edp)
346 			drm_dp_set_subconnector_property(dp->dp_display.connector,
347 							 connector_status_disconnected,
348 							 dp->panel->dpcd,
349 							 dp->panel->downstream_ports);
350 	}
351 
352 	dp->dp_display.link_ready = hpd;
353 
354 	drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n",
355 			dp->dp_display.connector_type, hpd);
356 	dp_display_send_hpd_event(&dp->dp_display);
357 
358 	return 0;
359 }
360 
361 static int dp_display_process_hpd_high(struct dp_display_private *dp)
362 {
363 	int rc = 0;
364 	struct edid *edid;
365 
366 	rc = dp_panel_read_sink_caps(dp->panel, dp->dp_display.connector);
367 	if (rc)
368 		goto end;
369 
370 	dp_link_process_request(dp->link);
371 
372 	if (!dp->dp_display.is_edp)
373 		drm_dp_set_subconnector_property(dp->dp_display.connector,
374 						 connector_status_connected,
375 						 dp->panel->dpcd,
376 						 dp->panel->downstream_ports);
377 
378 	edid = dp->panel->edid;
379 
380 	dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled;
381 
382 	dp->audio_supported = drm_detect_monitor_audio(edid);
383 	dp_panel_handle_sink_request(dp->panel);
384 
385 	/*
386 	 * set sink to normal operation mode -- D0
387 	 * before dpcd read
388 	 */
389 	dp_link_psm_config(dp->link, &dp->panel->link_info, false);
390 
391 	dp_link_reset_phy_params_vx_px(dp->link);
392 	rc = dp_ctrl_on_link(dp->ctrl);
393 	if (rc) {
394 		DRM_ERROR("failed to complete DP link training\n");
395 		goto end;
396 	}
397 
398 	dp_add_event(dp, EV_USER_NOTIFICATION, true, 0);
399 
400 end:
401 	return rc;
402 }
403 
404 static void dp_display_host_phy_init(struct dp_display_private *dp)
405 {
406 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
407 		dp->dp_display.connector_type, dp->core_initialized,
408 		dp->phy_initialized);
409 
410 	if (!dp->phy_initialized) {
411 		dp_ctrl_phy_init(dp->ctrl);
412 		dp->phy_initialized = true;
413 	}
414 }
415 
416 static void dp_display_host_phy_exit(struct dp_display_private *dp)
417 {
418 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
419 		dp->dp_display.connector_type, dp->core_initialized,
420 		dp->phy_initialized);
421 
422 	if (dp->phy_initialized) {
423 		dp_ctrl_phy_exit(dp->ctrl);
424 		dp->phy_initialized = false;
425 	}
426 }
427 
428 static void dp_display_host_init(struct dp_display_private *dp)
429 {
430 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
431 		dp->dp_display.connector_type, dp->core_initialized,
432 		dp->phy_initialized);
433 
434 	dp_ctrl_core_clk_enable(dp->ctrl);
435 	dp_ctrl_reset_irq_ctrl(dp->ctrl, true);
436 	dp_aux_init(dp->aux);
437 	dp->core_initialized = true;
438 }
439 
440 static void dp_display_host_deinit(struct dp_display_private *dp)
441 {
442 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
443 		dp->dp_display.connector_type, dp->core_initialized,
444 		dp->phy_initialized);
445 
446 	dp_ctrl_reset_irq_ctrl(dp->ctrl, false);
447 	dp_aux_deinit(dp->aux);
448 	dp_ctrl_core_clk_disable(dp->ctrl);
449 	dp->core_initialized = false;
450 }
451 
452 static int dp_display_usbpd_configure_cb(struct device *dev)
453 {
454 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
455 
456 	dp_display_host_phy_init(dp);
457 
458 	return dp_display_process_hpd_high(dp);
459 }
460 
461 static int dp_display_notify_disconnect(struct device *dev)
462 {
463 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
464 
465 	dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
466 
467 	return 0;
468 }
469 
470 static void dp_display_handle_video_request(struct dp_display_private *dp)
471 {
472 	if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) {
473 		dp->panel->video_test = true;
474 		dp_link_send_test_response(dp->link);
475 	}
476 }
477 
478 static int dp_display_handle_port_status_changed(struct dp_display_private *dp)
479 {
480 	int rc = 0;
481 
482 	if (drm_dp_is_branch(dp->panel->dpcd) && dp->link->sink_count == 0) {
483 		drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n");
484 		if (dp->hpd_state != ST_DISCONNECTED) {
485 			dp->hpd_state = ST_DISCONNECT_PENDING;
486 			dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
487 		}
488 	} else {
489 		if (dp->hpd_state == ST_DISCONNECTED) {
490 			dp->hpd_state = ST_MAINLINK_READY;
491 			rc = dp_display_process_hpd_high(dp);
492 			if (rc)
493 				dp->hpd_state = ST_DISCONNECTED;
494 		}
495 	}
496 
497 	return rc;
498 }
499 
500 static int dp_display_handle_irq_hpd(struct dp_display_private *dp)
501 {
502 	u32 sink_request = dp->link->sink_request;
503 
504 	drm_dbg_dp(dp->drm_dev, "%d\n", sink_request);
505 	if (dp->hpd_state == ST_DISCONNECTED) {
506 		if (sink_request & DP_LINK_STATUS_UPDATED) {
507 			drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n",
508 							sink_request);
509 			DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n");
510 			return -EINVAL;
511 		}
512 	}
513 
514 	dp_ctrl_handle_sink_request(dp->ctrl);
515 
516 	if (sink_request & DP_TEST_LINK_VIDEO_PATTERN)
517 		dp_display_handle_video_request(dp);
518 
519 	return 0;
520 }
521 
522 static int dp_display_usbpd_attention_cb(struct device *dev)
523 {
524 	int rc = 0;
525 	u32 sink_request;
526 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
527 
528 	/* check for any test request issued by sink */
529 	rc = dp_link_process_request(dp->link);
530 	if (!rc) {
531 		sink_request = dp->link->sink_request;
532 		drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n",
533 					dp->hpd_state, sink_request);
534 		if (sink_request & DS_PORT_STATUS_CHANGED)
535 			rc = dp_display_handle_port_status_changed(dp);
536 		else
537 			rc = dp_display_handle_irq_hpd(dp);
538 	}
539 
540 	return rc;
541 }
542 
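/*
 * dp_hpd_plug_handle(), dp_hpd_unplug_handle() and dp_irq_hpd_handle() run
 * from hpd_event_thread() and drive the hpd_state machine under event_mutex:
 * a successful plug moves ST_DISCONNECTED -> ST_MAINLINK_READY (link trained,
 * waiting for the uevent-triggered modeset to reach ST_CONNECTED), while an
 * unplug moves to ST_DISCONNECT_PENDING or ST_DISCONNECTED depending on
 * whether the display is still on.
 */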
543 static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data)
544 {
545 	u32 state;
546 	int ret;
547 	struct platform_device *pdev = dp->dp_display.pdev;
548 
549 	dp_aux_enable_xfers(dp->aux, true);
550 
551 	mutex_lock(&dp->event_mutex);
552 
553 	state =  dp->hpd_state;
554 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
555 			dp->dp_display.connector_type, state);
556 
557 	if (state == ST_DISPLAY_OFF) {
558 		mutex_unlock(&dp->event_mutex);
559 		return 0;
560 	}
561 
562 	if (state == ST_MAINLINK_READY || state == ST_CONNECTED) {
563 		mutex_unlock(&dp->event_mutex);
564 		return 0;
565 	}
566 
567 	if (state == ST_DISCONNECT_PENDING) {
568 		/* wait until ST_DISCONNECTED */
569 		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */
570 		mutex_unlock(&dp->event_mutex);
571 		return 0;
572 	}
573 
574 	ret = pm_runtime_resume_and_get(&pdev->dev);
575 	if (ret) {
576 		DRM_ERROR("failed to pm_runtime_resume\n");
577 		mutex_unlock(&dp->event_mutex);
578 		return ret;
579 	}
580 
581 	ret = dp_display_usbpd_configure_cb(&pdev->dev);
582 	if (ret) {	/* link train failed */
583 		dp->hpd_state = ST_DISCONNECTED;
584 		pm_runtime_put_sync(&pdev->dev);
585 	} else {
586 		dp->hpd_state = ST_MAINLINK_READY;
587 	}
588 
589 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
590 			dp->dp_display.connector_type, state);
591 	mutex_unlock(&dp->event_mutex);
592 
593 	/* uevent will complete connection part */
594 	return 0;
595 }
596 
597 static void dp_display_handle_plugged_change(struct msm_dp *dp_display,
598 		bool plugged)
599 {
600 	struct dp_display_private *dp;
601 
602 	dp = container_of(dp_display,
603 			struct dp_display_private, dp_display);
604 
605 	/* notify audio subsystem only if sink supports audio */
606 	if (dp_display->plugged_cb && dp_display->codec_dev &&
607 			dp->audio_supported)
608 		dp_display->plugged_cb(dp_display->codec_dev, plugged);
609 }
610 
611 static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data)
612 {
613 	u32 state;
614 	struct platform_device *pdev = dp->dp_display.pdev;
615 
616 	dp_aux_enable_xfers(dp->aux, false);
617 
618 	mutex_lock(&dp->event_mutex);
619 
620 	state = dp->hpd_state;
621 
622 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
623 			dp->dp_display.connector_type, state);
624 
625 	/* unplugged, no more irq_hpd handling */
626 	dp_del_event(dp, EV_IRQ_HPD_INT);
627 
628 	if (state == ST_DISCONNECTED) {
629 		/* triggered by irq_hpd with sink_count = 0 */
630 		if (dp->link->sink_count == 0) {
631 			dp_display_host_phy_exit(dp);
632 		}
633 		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
634 		mutex_unlock(&dp->event_mutex);
635 		return 0;
636 	} else if (state == ST_DISCONNECT_PENDING) {
637 		mutex_unlock(&dp->event_mutex);
638 		return 0;
639 	} else if (state == ST_MAINLINK_READY) {
640 		dp_ctrl_off_link(dp->ctrl);
641 		dp_display_host_phy_exit(dp);
642 		dp->hpd_state = ST_DISCONNECTED;
643 		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
644 		pm_runtime_put_sync(&pdev->dev);
645 		mutex_unlock(&dp->event_mutex);
646 		return 0;
647 	}
648 
649 	/*
650 	 * We don't need separate work for disconnect as
651 	 * connect/attention interrupts are disabled
652 	 */
653 	dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
654 
655 	if (state == ST_DISPLAY_OFF) {
656 		dp->hpd_state = ST_DISCONNECTED;
657 	} else {
658 		dp->hpd_state = ST_DISCONNECT_PENDING;
659 	}
660 
661 	/* signal the disconnect event early to ensure proper teardown */
662 	dp_display_handle_plugged_change(&dp->dp_display, false);
663 
664 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
665 			dp->dp_display.connector_type, state);
666 
667 	/* uevent will complete disconnection part */
668 	pm_runtime_put_sync(&pdev->dev);
669 	mutex_unlock(&dp->event_mutex);
670 	return 0;
671 }
672 
673 static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data)
674 {
675 	u32 state;
676 
677 	mutex_lock(&dp->event_mutex);
678 
679 	/* irq_hpd can happen at either connected or disconnected state */
680 	state =  dp->hpd_state;
681 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
682 			dp->dp_display.connector_type, state);
683 
684 	if (state == ST_DISPLAY_OFF) {
685 		mutex_unlock(&dp->event_mutex);
686 		return 0;
687 	}
688 
689 	if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) {
690 		/* wait until ST_CONNECTED */
691 		dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */
692 		mutex_unlock(&dp->event_mutex);
693 		return 0;
694 	}
695 
696 	dp_display_usbpd_attention_cb(&dp->dp_display.pdev->dev);
697 
698 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
699 			dp->dp_display.connector_type, state);
700 
701 	mutex_unlock(&dp->event_mutex);
702 
703 	return 0;
704 }
705 
706 static void dp_display_deinit_sub_modules(struct dp_display_private *dp)
707 {
708 	dp_audio_put(dp->audio);
709 	dp_panel_put(dp->panel);
710 	dp_aux_put(dp->aux);
711 }
712 
713 static int dp_init_sub_modules(struct dp_display_private *dp)
714 {
715 	int rc = 0;
716 	struct device *dev = &dp->dp_display.pdev->dev;
717 	struct dp_panel_in panel_in = {
718 		.dev = dev,
719 	};
720 	struct phy *phy;
721 
722 	phy = devm_phy_get(dev, "dp");
723 	if (IS_ERR(phy))
724 		return PTR_ERR(phy);
725 
726 	rc = phy_set_mode_ext(phy, PHY_MODE_DP,
727 			      dp->dp_display.is_edp ? PHY_SUBMODE_EDP : PHY_SUBMODE_DP);
728 	if (rc) {
729 		DRM_ERROR("failed to set phy submode, rc = %d\n", rc);
730 		dp->catalog = NULL;
731 		goto error;
732 	}
733 
734 	dp->catalog = dp_catalog_get(dev);
735 	if (IS_ERR(dp->catalog)) {
736 		rc = PTR_ERR(dp->catalog);
737 		DRM_ERROR("failed to initialize catalog, rc = %d\n", rc);
738 		dp->catalog = NULL;
739 		goto error;
740 	}
741 
742 	dp->aux = dp_aux_get(dev, dp->catalog,
743 			     phy,
744 			     dp->dp_display.is_edp);
745 	if (IS_ERR(dp->aux)) {
746 		rc = PTR_ERR(dp->aux);
747 		DRM_ERROR("failed to initialize aux, rc = %d\n", rc);
748 		dp->aux = NULL;
749 		goto error;
750 	}
751 
752 	dp->link = dp_link_get(dev, dp->aux);
753 	if (IS_ERR(dp->link)) {
754 		rc = PTR_ERR(dp->link);
755 		DRM_ERROR("failed to initialize link, rc = %d\n", rc);
756 		dp->link = NULL;
757 		goto error_link;
758 	}
759 
760 	panel_in.aux = dp->aux;
761 	panel_in.catalog = dp->catalog;
762 	panel_in.link = dp->link;
763 
764 	dp->panel = dp_panel_get(&panel_in);
765 	if (IS_ERR(dp->panel)) {
766 		rc = PTR_ERR(dp->panel);
767 		DRM_ERROR("failed to initialize panel, rc = %d\n", rc);
768 		dp->panel = NULL;
769 		goto error_link;
770 	}
771 
772 	dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux,
773 			       dp->catalog,
774 			       phy);
775 	if (IS_ERR(dp->ctrl)) {
776 		rc = PTR_ERR(dp->ctrl);
777 		DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc);
778 		dp->ctrl = NULL;
779 		goto error_ctrl;
780 	}
781 
782 	dp->audio = dp_audio_get(dp->dp_display.pdev, dp->panel, dp->catalog);
783 	if (IS_ERR(dp->audio)) {
784 		rc = PTR_ERR(dp->audio);
785 		pr_err("failed to initialize audio, rc = %d\n", rc);
786 		dp->audio = NULL;
787 		goto error_ctrl;
788 	}
789 
790 	return rc;
791 
792 error_ctrl:
793 	dp_panel_put(dp->panel);
794 error_link:
795 	dp_aux_put(dp->aux);
796 error:
797 	return rc;
798 }
799 
800 static int dp_display_set_mode(struct msm_dp *dp_display,
801 			       struct dp_display_mode *mode)
802 {
803 	struct dp_display_private *dp;
804 
805 	dp = container_of(dp_display, struct dp_display_private, dp_display);
806 
807 	drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode);
808 	dp->panel->dp_mode.bpp = mode->bpp;
809 	dp->panel->dp_mode.out_fmt_is_yuv_420 = mode->out_fmt_is_yuv_420;
810 	dp_panel_init_panel_info(dp->panel);
811 	return 0;
812 }
813 
814 static int dp_display_enable(struct dp_display_private *dp, bool force_link_train)
815 {
816 	int rc = 0;
817 	struct msm_dp *dp_display = &dp->dp_display;
818 
819 	drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count);
820 	if (dp_display->power_on) {
821 		drm_dbg_dp(dp->drm_dev, "Link already setup, return\n");
822 		return 0;
823 	}
824 
825 	rc = dp_ctrl_on_stream(dp->ctrl, force_link_train);
826 	if (!rc)
827 		dp_display->power_on = true;
828 
829 	return rc;
830 }
831 
832 static int dp_display_post_enable(struct msm_dp *dp_display)
833 {
834 	struct dp_display_private *dp;
835 	u32 rate;
836 
837 	dp = container_of(dp_display, struct dp_display_private, dp_display);
838 
839 	rate = dp->link->link_params.rate;
840 
841 	if (dp->audio_supported) {
842 		dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate);
843 		dp->audio->lane_count = dp->link->link_params.num_lanes;
844 	}
845 
846 	/* signal the connect event late to synchronize video and display */
847 	dp_display_handle_plugged_change(dp_display, true);
848 
849 	if (dp_display->psr_supported)
850 		dp_ctrl_config_psr(dp->ctrl);
851 
852 	return 0;
853 }
854 
855 static int dp_display_disable(struct dp_display_private *dp)
856 {
857 	struct msm_dp *dp_display = &dp->dp_display;
858 
859 	if (!dp_display->power_on)
860 		return 0;
861 
862 	/* wait only if audio was enabled */
863 	if (dp_display->audio_enabled) {
864 		/* signal the disconnect event */
865 		dp_display_handle_plugged_change(dp_display, false);
866 		if (!wait_for_completion_timeout(&dp->audio_comp,
867 				HZ * 5))
868 			DRM_ERROR("audio comp timeout\n");
869 	}
870 
871 	dp_display->audio_enabled = false;
872 
873 	if (dp->link->sink_count == 0) {
874 		/*
875 		 * irq_hpd with sink_count = 0
876 		 * HDMI cable unplugged from the dongle
877 		 */
878 		dp_ctrl_off_link_stream(dp->ctrl);
879 	} else {
880 		/*
881 		 * unplugged interrupt
882 		 * dongle unplugged from the DUT
883 		 */
884 		dp_ctrl_off(dp->ctrl);
885 		dp_display_host_phy_exit(dp);
886 	}
887 
888 	dp_display->power_on = false;
889 
890 	drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count);
891 	return 0;
892 }
893 
894 int dp_display_set_plugged_cb(struct msm_dp *dp_display,
895 		hdmi_codec_plugged_cb fn, struct device *codec_dev)
896 {
897 	bool plugged;
898 
899 	dp_display->plugged_cb = fn;
900 	dp_display->codec_dev = codec_dev;
901 	plugged = dp_display->link_ready;
902 	dp_display_handle_plugged_change(dp_display, plugged);
903 
904 	return 0;
905 }
906 
907 /**
908  * dp_bridge_mode_valid - callback to determine if specified mode is valid
909  * @bridge: Pointer to drm bridge structure
910  * @info: display info
911  * @mode: Pointer to drm mode structure
912  * Returns: Validity status for specified mode
913  */
914 enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge,
915 					  const struct drm_display_info *info,
916 					  const struct drm_display_mode *mode)
917 {
918 	const u32 num_components = 3, default_bpp = 24;
919 	struct dp_display_private *dp_display;
920 	struct dp_link_info *link_info;
921 	u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0;
922 	struct msm_dp *dp;
923 	int mode_pclk_khz = mode->clock;
924 
925 	dp = to_dp_bridge(bridge)->dp_display;
926 
927 	if (!dp || !mode_pclk_khz || !dp->connector) {
928 		DRM_ERROR("invalid params\n");
929 		return MODE_ERROR;
930 	}
931 
932 	if (mode->clock > DP_MAX_PIXEL_CLK_KHZ)
933 		return MODE_CLOCK_HIGH;
934 
935 	dp_display = container_of(dp, struct dp_display_private, dp_display);
936 	link_info = &dp_display->panel->link_info;
937 
938 	if (drm_mode_is_420_only(&dp->connector->display_info, mode) &&
939 	    dp_display->panel->vsc_sdp_supported)
940 		mode_pclk_khz /= 2;
941 
942 	mode_bpp = dp->connector->display_info.bpc * num_components;
943 	if (!mode_bpp)
944 		mode_bpp = default_bpp;
945 
946 	mode_bpp = dp_panel_get_mode_bpp(dp_display->panel,
947 			mode_bpp, mode_pclk_khz);
948 
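	/*
	 * Both sides of the comparison below are in kbit/s: the mode needs
	 * pclk * bpp, while each lane delivers link_info->rate kHz worth of
	 * symbols carrying 8 data bits per 8b/10b symbol, hence the "* 8".
	 */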
949 	mode_rate_khz = mode_pclk_khz * mode_bpp;
950 	supported_rate_khz = link_info->num_lanes * link_info->rate * 8;
951 
952 	if (mode_rate_khz > supported_rate_khz)
953 		return MODE_BAD;
954 
955 	return MODE_OK;
956 }
957 
958 int dp_display_get_modes(struct msm_dp *dp)
959 {
960 	struct dp_display_private *dp_display;
961 
962 	if (!dp) {
963 		DRM_ERROR("invalid params\n");
964 		return 0;
965 	}
966 
967 	dp_display = container_of(dp, struct dp_display_private, dp_display);
968 
969 	return dp_panel_get_modes(dp_display->panel,
970 		dp->connector);
971 }
972 
973 bool dp_display_check_video_test(struct msm_dp *dp)
974 {
975 	struct dp_display_private *dp_display;
976 
977 	dp_display = container_of(dp, struct dp_display_private, dp_display);
978 
979 	return dp_display->panel->video_test;
980 }
981 
982 int dp_display_get_test_bpp(struct msm_dp *dp)
983 {
984 	struct dp_display_private *dp_display;
985 
986 	if (!dp) {
987 		DRM_ERROR("invalid params\n");
988 		return 0;
989 	}
990 
991 	dp_display = container_of(dp, struct dp_display_private, dp_display);
992 
993 	return dp_link_bit_depth_to_bpp(
994 		dp_display->link->test_video.test_bit_depth);
995 }
996 
997 void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp)
998 {
999 	struct dp_display_private *dp_display;
1000 
1001 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1002 
1003 	/*
1004 	 * Reading these registers requires the link clocks to be on, but
1005 	 * until a DP cable is connected they are not, since we do not yet
1006 	 * know the resolution to power up with. Hence check the power_on
1007 	 * status before dumping DP registers to avoid a crash due to
1008 	 * unclocked register access.
1009 	 */
1010 	mutex_lock(&dp_display->event_mutex);
1011 
1012 	if (!dp->power_on) {
1013 		mutex_unlock(&dp_display->event_mutex);
1014 		return;
1015 	}
1016 
1017 	dp_catalog_snapshot(dp_display->catalog, disp_state);
1018 
1019 	mutex_unlock(&dp_display->event_mutex);
1020 }
1021 
1022 void dp_display_set_psr(struct msm_dp *dp_display, bool enter)
1023 {
1024 	struct dp_display_private *dp;
1025 
1026 	if (!dp_display) {
1027 		DRM_ERROR("invalid params\n");
1028 		return;
1029 	}
1030 
1031 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1032 	dp_ctrl_set_psr(dp->ctrl, enter);
1033 }
1034 
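/*
 * hpd_event_thread() drains the event ring. An event posted with a non-zero
 * "delay" is not handled immediately: each pass re-queues it with delay - 1
 * and switches the thread to a 100 ms (EVENT_TIMEOUT) polling mode, so the
 * delay value is effectively a count of ~100 ms ticks before the event is
 * processed.
 */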
1035 static int hpd_event_thread(void *data)
1036 {
1037 	struct dp_display_private *dp_priv;
1038 	unsigned long flag;
1039 	struct dp_event *todo;
1040 	int timeout_mode = 0;
1041 
1042 	dp_priv = (struct dp_display_private *)data;
1043 
1044 	while (1) {
1045 		if (timeout_mode) {
1046 			wait_event_timeout(dp_priv->event_q,
1047 				(dp_priv->event_pndx == dp_priv->event_gndx) ||
1048 					kthread_should_stop(), EVENT_TIMEOUT);
1049 		} else {
1050 			wait_event_interruptible(dp_priv->event_q,
1051 				(dp_priv->event_pndx != dp_priv->event_gndx) ||
1052 					kthread_should_stop());
1053 		}
1054 
1055 		if (kthread_should_stop())
1056 			break;
1057 
1058 		spin_lock_irqsave(&dp_priv->event_lock, flag);
1059 		todo = &dp_priv->event_list[dp_priv->event_gndx];
1060 		if (todo->delay) {
1061 			struct dp_event *todo_next;
1062 
1063 			dp_priv->event_gndx++;
1064 			dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1065 
1066 			/* re-queue the delayed event */
1067 			todo_next = &dp_priv->event_list[dp_priv->event_pndx++];
1068 			dp_priv->event_pndx %= DP_EVENT_Q_MAX;
1069 			todo_next->event_id = todo->event_id;
1070 			todo_next->data = todo->data;
1071 			todo_next->delay = todo->delay - 1;
1072 
1073 			/* clean up older event */
1074 			todo->event_id = EV_NO_EVENT;
1075 			todo->delay = 0;
1076 
1077 			/* switch to timeout mode */
1078 			timeout_mode = 1;
1079 			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1080 			continue;
1081 		}
1082 
1083 		/* timeout with no events in q */
1084 		if (dp_priv->event_pndx == dp_priv->event_gndx) {
1085 			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1086 			continue;
1087 		}
1088 
1089 		dp_priv->event_gndx++;
1090 		dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1091 		timeout_mode = 0;
1092 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1093 
1094 		switch (todo->event_id) {
1095 		case EV_HPD_PLUG_INT:
1096 			dp_hpd_plug_handle(dp_priv, todo->data);
1097 			break;
1098 		case EV_HPD_UNPLUG_INT:
1099 			dp_hpd_unplug_handle(dp_priv, todo->data);
1100 			break;
1101 		case EV_IRQ_HPD_INT:
1102 			dp_irq_hpd_handle(dp_priv, todo->data);
1103 			break;
1104 		case EV_USER_NOTIFICATION:
1105 			dp_display_send_hpd_notification(dp_priv,
1106 						todo->data);
1107 			break;
1108 		default:
1109 			break;
1110 		}
1111 	}
1112 
1113 	return 0;
1114 }
1115 
1116 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv)
1117 {
1118 	/* set event q to empty */
1119 	dp_priv->event_gndx = 0;
1120 	dp_priv->event_pndx = 0;
1121 
1122 	dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler");
1123 	if (IS_ERR(dp_priv->ev_tsk))
1124 		return PTR_ERR(dp_priv->ev_tsk);
1125 
1126 	return 0;
1127 }
1128 
1129 static irqreturn_t dp_display_irq_handler(int irq, void *dev_id)
1130 {
1131 	struct dp_display_private *dp = dev_id;
1132 	irqreturn_t ret = IRQ_NONE;
1133 	u32 hpd_isr_status;
1134 
1135 	if (!dp) {
1136 		DRM_ERROR("invalid data\n");
1137 		return IRQ_NONE;
1138 	}
1139 
1140 	hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog);
1141 
1142 	if (hpd_isr_status & 0x0F) {
1143 		drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n",
1144 			dp->dp_display.connector_type, hpd_isr_status);
1145 		/* hpd related interrupts */
1146 		if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK)
1147 			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1148 
1149 		if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) {
1150 			dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0);
1151 		}
1152 
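		/*
		 * A replug is treated as an unplug followed by a plug event
		 * deferred by 3 event-queue ticks (~300 ms with
		 * EVENT_TIMEOUT = HZ / 10), so the disconnect path runs first.
		 */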
1153 		if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) {
1154 			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1155 			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3);
1156 		}
1157 
1158 		if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK)
1159 			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1160 
1161 		ret = IRQ_HANDLED;
1162 	}
1163 
1164 	/* DP controller isr */
1165 	ret |= dp_ctrl_isr(dp->ctrl);
1166 
1167 	/* DP aux isr */
1168 	ret |= dp_aux_isr(dp->aux);
1169 
1170 	return ret;
1171 }
1172 
1173 static int dp_display_request_irq(struct dp_display_private *dp)
1174 {
1175 	int rc = 0;
1176 	struct platform_device *pdev = dp->dp_display.pdev;
1177 
1178 	dp->irq = platform_get_irq(pdev, 0);
1179 	if (dp->irq < 0) {
1180 		DRM_ERROR("failed to get irq\n");
1181 		return dp->irq;
1182 	}
1183 
1184 	rc = devm_request_irq(&pdev->dev, dp->irq, dp_display_irq_handler,
1185 			      IRQF_TRIGGER_HIGH|IRQF_NO_AUTOEN,
1186 			      "dp_display_isr", dp);
1187 
1188 	if (rc < 0) {
1189 		DRM_ERROR("failed to request IRQ%u: %d\n",
1190 				dp->irq, rc);
1191 		return rc;
1192 	}
1193 
1194 	return 0;
1195 }
1196 
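/*
 * The per-SoC descriptor tables above are zero-terminated (the trailing {}
 * entry has io_start == 0), so the lookup below walks the entries until it
 * either matches the controller's MMIO base address or hits the sentinel.
 */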
1197 static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev)
1198 {
1199 	const struct msm_dp_desc *descs = of_device_get_match_data(&pdev->dev);
1200 	struct resource *res;
1201 	int i;
1202 
1203 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1204 	if (!res)
1205 		return NULL;
1206 
1207 	for (i = 0; descs[i].io_start; i++) {
1208 		if (descs[i].io_start == res->start)
1209 			return &descs[i];
1210 	}
1211 
1212 	dev_err(&pdev->dev, "unknown displayport instance\n");
1213 	return NULL;
1214 }
1215 
1216 static int dp_display_probe_tail(struct device *dev)
1217 {
1218 	struct msm_dp *dp = dev_get_drvdata(dev);
1219 	int ret;
1220 
1221 	/*
1222 	 * External bridges are mandatory for eDP interfaces: one has to
1223 	 * provide at least an eDP panel (which gets wrapped into panel-bridge).
1224 	 *
1225 	 * For DisplayPort interfaces external bridges are optional, so
1226 	 * silently ignore an error if one is not present (-ENODEV).
1227 	 */
1228 	dp->next_bridge = devm_drm_of_get_bridge(&dp->pdev->dev, dp->pdev->dev.of_node, 1, 0);
1229 	if (IS_ERR(dp->next_bridge)) {
1230 		ret = PTR_ERR(dp->next_bridge);
1231 		dp->next_bridge = NULL;
1232 		if (dp->is_edp || ret != -ENODEV)
1233 			return ret;
1234 	}
1235 
1236 	ret = component_add(dev, &dp_display_comp_ops);
1237 	if (ret)
1238 		DRM_ERROR("component add failed, rc=%d\n", ret);
1239 
1240 	return ret;
1241 }
1242 
1243 static int dp_auxbus_done_probe(struct drm_dp_aux *aux)
1244 {
1245 	return dp_display_probe_tail(aux->dev);
1246 }
1247 
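
/*
 * The connector type is inferred from the devicetree: a panel described under
 * an "aux-bus" child node marks this controller as eDP, otherwise it is
 * treated as a regular DisplayPort port. A minimal, illustrative sketch only
 * (node name, unit address and panel compatible are placeholders, not a
 * complete binding):
 *
 *	displayport-controller@aea0000 {
 *		compatible = "qcom,sc7280-edp";
 *		...
 *		aux-bus {
 *			panel {
 *				compatible = "edp-panel";
 *			};
 *		};
 *	};
 */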
1248 static int dp_display_get_connector_type(struct platform_device *pdev,
1249 					 const struct msm_dp_desc *desc)
1250 {
1251 	struct device_node *node = pdev->dev.of_node;
1252 	struct device_node *aux_bus = of_get_child_by_name(node, "aux-bus");
1253 	struct device_node *panel = of_get_child_by_name(aux_bus, "panel");
1254 	int connector_type;
1255 
1256 	if (panel)
1257 		connector_type = DRM_MODE_CONNECTOR_eDP;
1258 	else
1259 		connector_type = DRM_MODE_CONNECTOR_DisplayPort;
1260 
1261 	of_node_put(panel);
1262 	of_node_put(aux_bus);
1263 
1264 	return connector_type;
1265 }
1266 
1267 static int dp_display_probe(struct platform_device *pdev)
1268 {
1269 	int rc = 0;
1270 	struct dp_display_private *dp;
1271 	const struct msm_dp_desc *desc;
1272 
1273 	if (!pdev || !pdev->dev.of_node) {
1274 		DRM_ERROR("pdev not found\n");
1275 		return -ENODEV;
1276 	}
1277 
1278 	dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL);
1279 	if (!dp)
1280 		return -ENOMEM;
1281 
1282 	desc = dp_display_get_desc(pdev);
1283 	if (!desc)
1284 		return -EINVAL;
1285 
1286 	dp->dp_display.pdev = pdev;
1287 	dp->id = desc->id;
1288 	dp->dp_display.connector_type = dp_display_get_connector_type(pdev, desc);
1289 	dp->wide_bus_supported = desc->wide_bus_supported;
1290 	dp->dp_display.is_edp =
1291 		(dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP);
1292 
1293 	rc = dp_init_sub_modules(dp);
1294 	if (rc) {
1295 		DRM_ERROR("init sub module failed\n");
1296 		return -EPROBE_DEFER;
1297 	}
1298 
1299 	/* setup event q */
1300 	mutex_init(&dp->event_mutex);
1301 	init_waitqueue_head(&dp->event_q);
1302 	spin_lock_init(&dp->event_lock);
1303 
1304 	/* Store DP audio handle inside DP display */
1305 	dp->dp_display.dp_audio = dp->audio;
1306 
1307 	init_completion(&dp->audio_comp);
1308 
1309 	platform_set_drvdata(pdev, &dp->dp_display);
1310 
1311 	rc = devm_pm_runtime_enable(&pdev->dev);
1312 	if (rc)
1313 		goto err;
1314 
1315 	rc = dp_display_request_irq(dp);
1316 	if (rc)
1317 		goto err;
1318 
1319 	if (dp->dp_display.is_edp) {
1320 		rc = devm_of_dp_aux_populate_bus(dp->aux, dp_auxbus_done_probe);
1321 		if (rc) {
1322 			DRM_ERROR("eDP auxbus population failed, rc=%d\n", rc);
1323 			goto err;
1324 		}
1325 	} else {
1326 		rc = dp_display_probe_tail(&pdev->dev);
1327 		if (rc)
1328 			goto err;
1329 	}
1330 
1331 	return rc;
1332 
1333 err:
1334 	dp_display_deinit_sub_modules(dp);
1335 	return rc;
1336 }
1337 
1338 static void dp_display_remove(struct platform_device *pdev)
1339 {
1340 	struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev);
1341 
1342 	component_del(&pdev->dev, &dp_display_comp_ops);
1343 	dp_display_deinit_sub_modules(dp);
1344 	platform_set_drvdata(pdev, NULL);
1345 }
1346 
1347 static int dp_pm_runtime_suspend(struct device *dev)
1348 {
1349 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
1350 
1351 	disable_irq(dp->irq);
1352 
1353 	if (dp->dp_display.is_edp) {
1354 		dp_display_host_phy_exit(dp);
1355 		dp_catalog_ctrl_hpd_disable(dp->catalog);
1356 	}
1357 	dp_display_host_deinit(dp);
1358 
1359 	return 0;
1360 }
1361 
1362 static int dp_pm_runtime_resume(struct device *dev)
1363 {
1364 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
1365 
1366 	/*
1367 	 * for eDP, host controller, HPD block and PHY are enabled here
1368 	 * but with HPD irq disabled
1369 	 *
1370 	 * for DP, only host controller is enabled here.
1371 	 * HPD block is enabled at dp_bridge_hpd_enable()
1372 	 * PHY will be enabled at plugin handler later
1373 	 */
1374 	dp_display_host_init(dp);
1375 	if (dp->dp_display.is_edp) {
1376 		dp_catalog_ctrl_hpd_enable(dp->catalog);
1377 		dp_display_host_phy_init(dp);
1378 	}
1379 
1380 	enable_irq(dp->irq);
1381 	return 0;
1382 }
1383 
1384 static const struct dev_pm_ops dp_pm_ops = {
1385 	SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL)
1386 	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
1387 				pm_runtime_force_resume)
1388 };
1389 
1390 static struct platform_driver dp_display_driver = {
1391 	.probe  = dp_display_probe,
1392 	.remove_new = dp_display_remove,
1393 	.driver = {
1394 		.name = "msm-dp-display",
1395 		.of_match_table = dp_dt_match,
1396 		.suppress_bind_attrs = true,
1397 		.pm = &dp_pm_ops,
1398 	},
1399 };
1400 
1401 int __init msm_dp_register(void)
1402 {
1403 	int ret;
1404 
1405 	ret = platform_driver_register(&dp_display_driver);
1406 	if (ret)
1407 		DRM_ERROR("DP display driver register failed\n");
1408 
1409 	return ret;
1410 }
1411 
1412 void __exit msm_dp_unregister(void)
1413 {
1414 	platform_driver_unregister(&dp_display_driver);
1415 }
1416 
1417 bool msm_dp_is_yuv_420_enabled(const struct msm_dp *dp_display,
1418 			       const struct drm_display_mode *mode)
1419 {
1420 	struct dp_display_private *dp;
1421 	const struct drm_display_info *info;
1422 
1423 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1424 	info = &dp_display->connector->display_info;
1425 
1426 	return dp->panel->vsc_sdp_supported && drm_mode_is_420_only(info, mode);
1427 }
1428 
1429 bool msm_dp_needs_periph_flush(const struct msm_dp *dp_display,
1430 			       const struct drm_display_mode *mode)
1431 {
1432 	return msm_dp_is_yuv_420_enabled(dp_display, mode);
1433 }
1434 
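/*
 * "Wide bus" essentially lets the interface between the display controller
 * and the DP controller carry two pixels per clock. It is advertised per SoC
 * via msm_dp_desc.wide_bus_supported, but this driver does not combine it
 * with YUV420 output, so it is reported as unavailable for 4:2:0 modes here
 * and in dp_bridge_mode_set().
 */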
1435 bool msm_dp_wide_bus_available(const struct msm_dp *dp_display)
1436 {
1437 	struct dp_display_private *dp;
1438 
1439 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1440 
1441 	if (dp->dp_mode.out_fmt_is_yuv_420)
1442 		return false;
1443 
1444 	return dp->wide_bus_supported;
1445 }
1446 
1447 void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp)
1448 {
1449 	struct dp_display_private *dp;
1450 	struct device *dev;
1451 	int rc;
1452 
1453 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1454 	dev = &dp->dp_display.pdev->dev;
1455 
1456 	rc = dp_debug_init(dev, dp->panel, dp->link, dp->dp_display.connector, root, is_edp);
1457 	if (rc)
1458 		DRM_ERROR("failed to initialize debug, rc = %d\n", rc);
1459 }
1460 
1461 int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev,
1462 			struct drm_encoder *encoder, bool yuv_supported)
1463 {
1464 	struct dp_display_private *dp_priv;
1465 	int ret;
1466 
1467 	dp_display->drm_dev = dev;
1468 
1469 	dp_priv = container_of(dp_display, struct dp_display_private, dp_display);
1470 
1471 	ret = dp_bridge_init(dp_display, dev, encoder);
1472 	if (ret) {
1473 		DRM_DEV_ERROR(dev->dev,
1474 			"failed to create dp bridge: %d\n", ret);
1475 		return ret;
1476 	}
1477 
1478 	dp_display->connector = dp_drm_connector_init(dp_display, encoder, yuv_supported);
1479 	if (IS_ERR(dp_display->connector)) {
1480 		ret = PTR_ERR(dp_display->connector);
1481 		DRM_DEV_ERROR(dev->dev,
1482 			"failed to create dp connector: %d\n", ret);
1483 		dp_display->connector = NULL;
1484 		return ret;
1485 	}
1486 
1487 	dp_priv->panel->connector = dp_display->connector;
1488 
1489 	return 0;
1490 }
1491 
1492 void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge,
1493 			     struct drm_bridge_state *old_bridge_state)
1494 {
1495 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1496 	struct msm_dp *dp = dp_bridge->dp_display;
1497 	int rc = 0;
1498 	struct dp_display_private *dp_display;
1499 	u32 state;
1500 	bool force_link_train = false;
1501 
1502 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1503 	if (!dp_display->dp_mode.drm_mode.clock) {
1504 		DRM_ERROR("invalid params\n");
1505 		return;
1506 	}
1507 
1508 	if (dp->is_edp)
1509 		dp_hpd_plug_handle(dp_display, 0);
1510 
1511 	mutex_lock(&dp_display->event_mutex);
1512 	if (pm_runtime_resume_and_get(&dp->pdev->dev)) {
1513 		DRM_ERROR("failed to pm_runtime_resume\n");
1514 		mutex_unlock(&dp_display->event_mutex);
1515 		return;
1516 	}
1517 
1518 	state = dp_display->hpd_state;
1519 	if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) {
1520 		mutex_unlock(&dp_display->event_mutex);
1521 		return;
1522 	}
1523 
1524 	rc = dp_display_set_mode(dp, &dp_display->dp_mode);
1525 	if (rc) {
1526 		DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc);
1527 		mutex_unlock(&dp_display->event_mutex);
1528 		return;
1529 	}
1530 
1531 	state =  dp_display->hpd_state;
1532 
1533 	if (state == ST_DISPLAY_OFF) {
1534 		dp_display_host_phy_init(dp_display);
1535 		force_link_train = true;
1536 	}
1537 
1538 	dp_display_enable(dp_display, force_link_train);
1539 
1540 	rc = dp_display_post_enable(dp);
1541 	if (rc) {
1542 		DRM_ERROR("DP display post enable failed, rc=%d\n", rc);
1543 		dp_display_disable(dp_display);
1544 	}
1545 
1546 	/* completed connection */
1547 	dp_display->hpd_state = ST_CONNECTED;
1548 
1549 	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1550 	mutex_unlock(&dp_display->event_mutex);
1551 }
1552 
1553 void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge,
1554 			      struct drm_bridge_state *old_bridge_state)
1555 {
1556 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1557 	struct msm_dp *dp = dp_bridge->dp_display;
1558 	struct dp_display_private *dp_display;
1559 
1560 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1561 
1562 	dp_ctrl_push_idle(dp_display->ctrl);
1563 }
1564 
1565 void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge,
1566 				   struct drm_bridge_state *old_bridge_state)
1567 {
1568 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1569 	struct msm_dp *dp = dp_bridge->dp_display;
1570 	u32 state;
1571 	struct dp_display_private *dp_display;
1572 
1573 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1574 
1575 	if (dp->is_edp)
1576 		dp_hpd_unplug_handle(dp_display, 0);
1577 
1578 	mutex_lock(&dp_display->event_mutex);
1579 
1580 	state = dp_display->hpd_state;
1581 	if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED)
1582 		drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n",
1583 			   dp->connector_type, state);
1584 
1585 	dp_display_disable(dp_display);
1586 
1587 	state =  dp_display->hpd_state;
1588 	if (state == ST_DISCONNECT_PENDING) {
1589 		/* completed disconnection */
1590 		dp_display->hpd_state = ST_DISCONNECTED;
1591 	} else {
1592 		dp_display->hpd_state = ST_DISPLAY_OFF;
1593 	}
1594 
1595 	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1596 
1597 	pm_runtime_put_sync(&dp->pdev->dev);
1598 	mutex_unlock(&dp_display->event_mutex);
1599 }
1600 
1601 void dp_bridge_mode_set(struct drm_bridge *drm_bridge,
1602 			const struct drm_display_mode *mode,
1603 			const struct drm_display_mode *adjusted_mode)
1604 {
1605 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1606 	struct msm_dp *dp = dp_bridge->dp_display;
1607 	struct dp_display_private *dp_display;
1608 	struct dp_panel *dp_panel;
1609 
1610 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1611 	dp_panel = dp_display->panel;
1612 
1613 	memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode));
1614 
1615 	if (dp_display_check_video_test(dp))
1616 		dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp);
1617 	else /* Default num_components per pixel = 3 */
1618 		dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3;
1619 
1620 	if (!dp_display->dp_mode.bpp)
1621 		dp_display->dp_mode.bpp = 24; /* Default bpp */
1622 
1623 	drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode);
1624 
1625 	dp_display->dp_mode.v_active_low =
1626 		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC);
1627 
1628 	dp_display->dp_mode.h_active_low =
1629 		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC);
1630 
1631 	dp_display->dp_mode.out_fmt_is_yuv_420 =
1632 		drm_mode_is_420_only(&dp->connector->display_info, adjusted_mode) &&
1633 		dp_panel->vsc_sdp_supported;
1634 
1635 	/* propagate wide bus support to the lower layers */
1636 	dp_display->ctrl->wide_bus_en =
1637 		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1638 	dp_display->catalog->wide_bus_en =
1639 		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1640 }
1641 
1642 void dp_bridge_hpd_enable(struct drm_bridge *bridge)
1643 {
1644 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1645 	struct msm_dp *dp_display = dp_bridge->dp_display;
1646 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1647 
1648 	/*
1649 	 * this is for the external DP with HPD irq enabled case,
1650 	 * step-1: dp_pm_runtime_resume() enables the DP host only
1651 	 * step-2: enable the HPD block and HPD irq here
1652 	 * step-3: wait for the plugin irq while the PHY is not initialized
1653 	 * step-4: the DP PHY is initialized in the plugin handler before link training
1655 	 */
1656 	mutex_lock(&dp->event_mutex);
1657 	if (pm_runtime_resume_and_get(&dp_display->pdev->dev)) {
1658 		DRM_ERROR("failed to resume power\n");
1659 		mutex_unlock(&dp->event_mutex);
1660 		return;
1661 	}
1662 
1663 	dp_catalog_ctrl_hpd_enable(dp->catalog);
1664 
1665 	/* enable HPD interrupts */
1666 	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, true);
1667 
1668 	dp_display->internal_hpd = true;
1669 	mutex_unlock(&dp->event_mutex);
1670 }
1671 
1672 void dp_bridge_hpd_disable(struct drm_bridge *bridge)
1673 {
1674 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1675 	struct msm_dp *dp_display = dp_bridge->dp_display;
1676 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1677 
1678 	mutex_lock(&dp->event_mutex);
1679 	/* disable HPD interrupts */
1680 	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false);
1681 	dp_catalog_ctrl_hpd_disable(dp->catalog);
1682 
1683 	dp_display->internal_hpd = false;
1684 
1685 	pm_runtime_put_sync(&dp_display->pdev->dev);
1686 	mutex_unlock(&dp->event_mutex);
1687 }
1688 
1689 void dp_bridge_hpd_notify(struct drm_bridge *bridge,
1690 			  enum drm_connector_status status)
1691 {
1692 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1693 	struct msm_dp *dp_display = dp_bridge->dp_display;
1694 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1695 
1696 	/* Without next_bridge interrupts are handled by the DP core directly */
1697 	if (dp_display->internal_hpd)
1698 		return;
1699 
1700 	if (!dp_display->link_ready && status == connector_status_connected)
1701 		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1702 	else if (dp_display->link_ready && status == connector_status_disconnected)
1703 		dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1704 }
1705