xref: /linux/drivers/gpu/drm/msm/dp/dp_display.c (revision be239684b18e1cdcafcf8c7face4a2f562c745ad)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #include <linux/module.h>
7 #include <linux/slab.h>
8 #include <linux/uaccess.h>
9 #include <linux/debugfs.h>
10 #include <linux/component.h>
11 #include <linux/of_irq.h>
12 #include <linux/phy/phy.h>
13 #include <linux/delay.h>
14 #include <drm/display/drm_dp_aux_bus.h>
15 #include <drm/drm_edid.h>
16 
17 #include "msm_drv.h"
18 #include "msm_kms.h"
19 #include "dp_ctrl.h"
20 #include "dp_catalog.h"
21 #include "dp_aux.h"
22 #include "dp_reg.h"
23 #include "dp_link.h"
24 #include "dp_panel.h"
25 #include "dp_display.h"
26 #include "dp_drm.h"
27 #include "dp_audio.h"
28 #include "dp_debug.h"
29 
30 static bool psr_enabled = false;
31 module_param(psr_enabled, bool, 0);
32 MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays");
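/*
 * e.g. booting with "msm.psr_enabled=1" on the kernel command line (assuming
 * the DP code is built into the usual msm.ko module) opts displays into PSR.
 */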
33 
34 #define HPD_STRING_SIZE 30
35 
36 enum {
37 	ISR_DISCONNECTED,
38 	ISR_CONNECT_PENDING,
39 	ISR_CONNECTED,
40 	ISR_HPD_REPLUG_COUNT,
41 	ISR_IRQ_HPD_PULSE_COUNT,
42 	ISR_HPD_LO_GLITCH_COUNT,
43 };
44 
45 /* event thread connection state */
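/*
 * Typical transitions, as driven by the hpd event thread and the bridge
 * callbacks below: ST_DISCONNECTED -> ST_MAINLINK_READY after a plug-in and
 * successful link training, ST_MAINLINK_READY -> ST_CONNECTED once the bridge
 * is enabled, ST_CONNECTED -> ST_DISCONNECT_PENDING on unplug and back to
 * ST_DISCONNECTED after the bridge is torn down, and ST_DISPLAY_OFF while the
 * bridge is disabled with the cable still attached.
 */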
46 enum {
47 	ST_DISCONNECTED,
48 	ST_MAINLINK_READY,
49 	ST_CONNECTED,
50 	ST_DISCONNECT_PENDING,
51 	ST_DISPLAY_OFF,
52 };
53 
54 enum {
55 	EV_NO_EVENT,
56 	/* hpd events */
57 	EV_HPD_PLUG_INT,
58 	EV_IRQ_HPD_INT,
59 	EV_HPD_UNPLUG_INT,
60 	EV_USER_NOTIFICATION,
61 };
62 
63 #define EVENT_TIMEOUT	(HZ/10)	/* 100ms */
64 #define DP_EVENT_Q_MAX	8
65 
66 #define DP_TIMEOUT_NONE		0
67 
68 #define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2)
69 
70 struct dp_event {
71 	u32 event_id;
72 	u32 data;
73 	u32 delay;
74 };
75 
76 struct dp_display_private {
77 	char *name;
78 	int irq;
79 
80 	unsigned int id;
81 
82 	/* state variables */
83 	bool core_initialized;
84 	bool phy_initialized;
85 	bool hpd_irq_on;
86 	bool audio_supported;
87 
88 	struct drm_device *drm_dev;
89 	struct dentry *root;
90 
91 	struct dp_catalog *catalog;
92 	struct drm_dp_aux *aux;
93 	struct dp_link    *link;
94 	struct dp_panel   *panel;
95 	struct dp_ctrl    *ctrl;
96 	struct dp_debug   *debug;
97 
98 	struct dp_display_mode dp_mode;
99 	struct msm_dp dp_display;
100 
101 	/* wait for audio signaling */
102 	struct completion audio_comp;
103 
104 	/* event related, only accessed by the event thread */
105 	struct mutex event_mutex;
106 	wait_queue_head_t event_q;
107 	u32 hpd_state;
108 	u32 event_pndx;
109 	u32 event_gndx;
110 	struct task_struct *ev_tsk;
111 	struct dp_event event_list[DP_EVENT_Q_MAX];
112 	spinlock_t event_lock;
113 
114 	bool wide_bus_supported;
115 
116 	struct dp_audio *audio;
117 };
118 
119 struct msm_dp_desc {
120 	phys_addr_t io_start;
121 	unsigned int id;
122 	unsigned int connector_type;
123 	bool wide_bus_supported;
124 };
125 
126 static const struct msm_dp_desc sc7180_dp_descs[] = {
127 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
128 	{}
129 };
130 
131 static const struct msm_dp_desc sc7280_dp_descs[] = {
132 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
133 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
134 	{}
135 };
136 
137 static const struct msm_dp_desc sc8180x_dp_descs[] = {
138 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
139 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
140 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP },
141 	{}
142 };
143 
144 static const struct msm_dp_desc sc8280xp_dp_descs[] = {
145 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
146 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
147 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
148 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
149 	{ .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
150 	{ .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
151 	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
152 	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
153 	{}
154 };
155 
156 static const struct msm_dp_desc sc8280xp_edp_descs[] = {
157 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
158 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
159 	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
160 	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
161 	{}
162 };
163 
164 static const struct msm_dp_desc sm8350_dp_descs[] = {
165 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
166 	{}
167 };
168 
169 static const struct msm_dp_desc sm8650_dp_descs[] = {
170 	{ .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
171 	{}
172 };
173 
174 static const struct of_device_id dp_dt_match[] = {
175 	{ .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_descs },
176 	{ .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_descs },
177 	{ .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_descs },
178 	{ .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_descs },
179 	{ .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_descs },
180 	{ .compatible = "qcom,sc8280xp-dp", .data = &sc8280xp_dp_descs },
181 	{ .compatible = "qcom,sc8280xp-edp", .data = &sc8280xp_edp_descs },
182 	{ .compatible = "qcom,sdm845-dp", .data = &sc7180_dp_descs },
183 	{ .compatible = "qcom,sm8350-dp", .data = &sm8350_dp_descs },
184 	{ .compatible = "qcom,sm8650-dp", .data = &sm8650_dp_descs },
185 	{}
186 };
187 
188 static struct dp_display_private *dev_get_dp_display_private(struct device *dev)
189 {
190 	struct msm_dp *dp = dev_get_drvdata(dev);
191 
192 	return container_of(dp, struct dp_display_private, dp_display);
193 }
194 
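/*
 * Queue an event for the hpd event thread.  event_list is a fixed-size ring
 * buffer indexed by the producer (event_pndx) and consumer (event_gndx)
 * indices; one slot is kept unused, so with DP_EVENT_Q_MAX = 8 at most seven
 * events can be pending (e.g. pndx == 7, gndx == 0 reads as "full").  A
 * non-zero delay makes the event thread re-queue the event instead of
 * handling it, roughly one EVENT_TIMEOUT (100ms) tick per unit of delay.
 */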
195 static int dp_add_event(struct dp_display_private *dp_priv, u32 event,
196 						u32 data, u32 delay)
197 {
198 	unsigned long flag;
199 	struct dp_event *todo;
200 	int pndx;
201 
202 	spin_lock_irqsave(&dp_priv->event_lock, flag);
203 	pndx = dp_priv->event_pndx + 1;
204 	pndx %= DP_EVENT_Q_MAX;
205 	if (pndx == dp_priv->event_gndx) {
206 		pr_err("event_q is full: pndx=%d gndx=%d\n",
207 			dp_priv->event_pndx, dp_priv->event_gndx);
208 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
209 		return -EPERM;
210 	}
211 	todo = &dp_priv->event_list[dp_priv->event_pndx++];
212 	dp_priv->event_pndx %= DP_EVENT_Q_MAX;
213 	todo->event_id = event;
214 	todo->data = data;
215 	todo->delay = delay;
216 	wake_up(&dp_priv->event_q);
217 	spin_unlock_irqrestore(&dp_priv->event_lock, flag);
218 
219 	return 0;
220 }
221 
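/*
 * Cancel pending events of the given type: matching entries are only marked
 * EV_NO_EVENT (which the event thread ignores); the ring indices themselves
 * are left untouched.
 */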
222 static int dp_del_event(struct dp_display_private *dp_priv, u32 event)
223 {
224 	unsigned long flag;
225 	struct dp_event *todo;
226 	u32	gndx;
227 
228 	spin_lock_irqsave(&dp_priv->event_lock, flag);
229 	if (dp_priv->event_pndx == dp_priv->event_gndx) {
230 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
231 		return -ENOENT;
232 	}
233 
234 	gndx = dp_priv->event_gndx;
235 	while (dp_priv->event_pndx != gndx) {
236 		todo = &dp_priv->event_list[gndx];
237 		if (todo->event_id == event) {
238 			todo->event_id = EV_NO_EVENT;	/* deleted */
239 			todo->delay = 0;
240 		}
241 		gndx++;
242 		gndx %= DP_EVENT_Q_MAX;
243 	}
244 	spin_unlock_irqrestore(&dp_priv->event_lock, flag);
245 
246 	return 0;
247 }
248 
249 void dp_display_signal_audio_start(struct msm_dp *dp_display)
250 {
251 	struct dp_display_private *dp;
252 
253 	dp = container_of(dp_display, struct dp_display_private, dp_display);
254 
255 	reinit_completion(&dp->audio_comp);
256 }
257 
258 void dp_display_signal_audio_complete(struct msm_dp *dp_display)
259 {
260 	struct dp_display_private *dp;
261 
262 	dp = container_of(dp_display, struct dp_display_private, dp_display);
263 
264 	complete_all(&dp->audio_comp);
265 }
266 
267 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv);
268 
269 static int dp_display_bind(struct device *dev, struct device *master,
270 			   void *data)
271 {
272 	int rc = 0;
273 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
274 	struct msm_drm_private *priv = dev_get_drvdata(master);
275 	struct drm_device *drm = priv->dev;
276 
277 	dp->dp_display.drm_dev = drm;
278 	priv->dp[dp->id] = &dp->dp_display;
279 
282 	dp->drm_dev = drm;
283 	dp->aux->drm_dev = drm;
284 	rc = dp_aux_register(dp->aux);
285 	if (rc) {
286 		DRM_ERROR("DRM DP AUX register failed\n");
287 		goto end;
288 	}
289 
291 	rc = dp_register_audio_driver(dev, dp->audio);
292 	if (rc) {
293 		DRM_ERROR("DP audio registration failed\n");
294 		goto end;
295 	}
296 
297 	rc = dp_hpd_event_thread_start(dp);
298 	if (rc) {
299 		DRM_ERROR("Event thread create failed\n");
300 		goto end;
301 	}
302 
303 	return 0;
304 end:
305 	return rc;
306 }
307 
308 static void dp_display_unbind(struct device *dev, struct device *master,
309 			      void *data)
310 {
311 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
312 	struct msm_drm_private *priv = dev_get_drvdata(master);
313 
314 	kthread_stop(dp->ev_tsk);
315 
316 	of_dp_aux_depopulate_bus(dp->aux);
317 
318 	dp_unregister_audio_driver(dev, dp->audio);
319 	dp_aux_unregister(dp->aux);
320 	dp->drm_dev = NULL;
321 	dp->aux->drm_dev = NULL;
322 	priv->dp[dp->id] = NULL;
323 }
324 
325 static const struct component_ops dp_display_comp_ops = {
326 	.bind = dp_display_bind,
327 	.unbind = dp_display_unbind,
328 };
329 
330 static int dp_display_send_hpd_notification(struct dp_display_private *dp,
331 					    bool hpd)
332 {
333 	struct drm_bridge *bridge = dp->dp_display.bridge;
334 
335 	/* reset video pattern flag on disconnect */
336 	if (!hpd) {
337 		dp->panel->video_test = false;
338 		if (!dp->dp_display.is_edp)
339 			drm_dp_set_subconnector_property(dp->dp_display.connector,
340 							 connector_status_disconnected,
341 							 dp->panel->dpcd,
342 							 dp->panel->downstream_ports);
343 	}
344 
345 	dp->dp_display.link_ready = hpd;
346 
347 	drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n",
348 			dp->dp_display.connector_type, hpd);
349 	drm_bridge_hpd_notify(bridge, dp->dp_display.link_ready);
350 
351 	return 0;
352 }
353 
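/*
 * Plug-in path: read the sink's DPCD and EDID, bring the sink to D0, train
 * the main link and finally queue an EV_USER_NOTIFICATION so the event
 * thread reports the new state through drm_bridge_hpd_notify().
 */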
354 static int dp_display_process_hpd_high(struct dp_display_private *dp)
355 {
356 	int rc = 0;
357 	struct edid *edid;
358 
359 	rc = dp_panel_read_sink_caps(dp->panel, dp->dp_display.connector);
360 	if (rc)
361 		goto end;
362 
363 	dp_link_process_request(dp->link);
364 
365 	if (!dp->dp_display.is_edp)
366 		drm_dp_set_subconnector_property(dp->dp_display.connector,
367 						 connector_status_connected,
368 						 dp->panel->dpcd,
369 						 dp->panel->downstream_ports);
370 
371 	edid = dp->panel->edid;
372 
373 	dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled;
374 
375 	dp->audio_supported = drm_detect_monitor_audio(edid);
376 	dp_panel_handle_sink_request(dp->panel);
377 
378 	/*
379 	 * set sink to normal operation mode -- D0
380 	 * before dpcd read
381 	 */
382 	dp_link_psm_config(dp->link, &dp->panel->link_info, false);
383 
384 	dp_link_reset_phy_params_vx_px(dp->link);
385 	rc = dp_ctrl_on_link(dp->ctrl);
386 	if (rc) {
387 		DRM_ERROR("failed to complete DP link training\n");
388 		goto end;
389 	}
390 
391 	dp_add_event(dp, EV_USER_NOTIFICATION, true, 0);
392 
393 end:
394 	return rc;
395 }
396 
397 static void dp_display_host_phy_init(struct dp_display_private *dp)
398 {
399 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
400 		dp->dp_display.connector_type, dp->core_initialized,
401 		dp->phy_initialized);
402 
403 	if (!dp->phy_initialized) {
404 		dp_ctrl_phy_init(dp->ctrl);
405 		dp->phy_initialized = true;
406 	}
407 }
408 
409 static void dp_display_host_phy_exit(struct dp_display_private *dp)
410 {
411 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
412 		dp->dp_display.connector_type, dp->core_initialized,
413 		dp->phy_initialized);
414 
415 	if (dp->phy_initialized) {
416 		dp_ctrl_phy_exit(dp->ctrl);
417 		dp->phy_initialized = false;
418 	}
419 }
420 
421 static void dp_display_host_init(struct dp_display_private *dp)
422 {
423 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
424 		dp->dp_display.connector_type, dp->core_initialized,
425 		dp->phy_initialized);
426 
427 	dp_ctrl_core_clk_enable(dp->ctrl);
428 	dp_ctrl_reset_irq_ctrl(dp->ctrl, true);
429 	dp_aux_init(dp->aux);
430 	dp->core_initialized = true;
431 }
432 
433 static void dp_display_host_deinit(struct dp_display_private *dp)
434 {
435 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
436 		dp->dp_display.connector_type, dp->core_initialized,
437 		dp->phy_initialized);
438 
439 	dp_ctrl_reset_irq_ctrl(dp->ctrl, false);
440 	dp_aux_deinit(dp->aux);
441 	dp_ctrl_core_clk_disable(dp->ctrl);
442 	dp->core_initialized = false;
443 }
444 
445 static int dp_display_usbpd_configure_cb(struct device *dev)
446 {
447 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
448 
449 	dp_display_host_phy_init(dp);
450 
451 	return dp_display_process_hpd_high(dp);
452 }
453 
454 static int dp_display_notify_disconnect(struct device *dev)
455 {
456 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
457 
458 	dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
459 
460 	return 0;
461 }
462 
463 static void dp_display_handle_video_request(struct dp_display_private *dp)
464 {
465 	if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) {
466 		dp->panel->video_test = true;
467 		dp_link_send_test_response(dp->link);
468 	}
469 }
470 
471 static int dp_display_handle_port_status_changed(struct dp_display_private *dp)
472 {
473 	int rc = 0;
474 
475 	if (drm_dp_is_branch(dp->panel->dpcd) && dp->link->sink_count == 0) {
476 		drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n");
477 		if (dp->hpd_state != ST_DISCONNECTED) {
478 			dp->hpd_state = ST_DISCONNECT_PENDING;
479 			dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
480 		}
481 	} else {
482 		if (dp->hpd_state == ST_DISCONNECTED) {
483 			dp->hpd_state = ST_MAINLINK_READY;
484 			rc = dp_display_process_hpd_high(dp);
485 			if (rc)
486 				dp->hpd_state = ST_DISCONNECTED;
487 		}
488 	}
489 
490 	return rc;
491 }
492 
493 static int dp_display_handle_irq_hpd(struct dp_display_private *dp)
494 {
495 	u32 sink_request = dp->link->sink_request;
496 
497 	drm_dbg_dp(dp->drm_dev, "%d\n", sink_request);
498 	if (dp->hpd_state == ST_DISCONNECTED) {
499 		if (sink_request & DP_LINK_STATUS_UPDATED) {
500 			drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n",
501 							sink_request);
502 			DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n");
503 			return -EINVAL;
504 		}
505 	}
506 
507 	dp_ctrl_handle_sink_request(dp->ctrl);
508 
509 	if (sink_request & DP_TEST_LINK_VIDEO_PATTERN)
510 		dp_display_handle_video_request(dp);
511 
512 	return 0;
513 }
514 
515 static int dp_display_usbpd_attention_cb(struct device *dev)
516 {
517 	int rc = 0;
518 	u32 sink_request;
519 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
520 
521 	/* check for any test request issued by sink */
522 	rc = dp_link_process_request(dp->link);
523 	if (!rc) {
524 		sink_request = dp->link->sink_request;
525 		drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n",
526 					dp->hpd_state, sink_request);
527 		if (sink_request & DS_PORT_STATUS_CHANGED)
528 			rc = dp_display_handle_port_status_changed(dp);
529 		else
530 			rc = dp_display_handle_irq_hpd(dp);
531 	}
532 
533 	return rc;
534 }
535 
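/*
 * EV_HPD_PLUG_INT handler, run on the hpd event thread.  Takes a runtime PM
 * reference, initializes the PHY and trains the link; the matching
 * pm_runtime_put_sync() happens on the unplug path.
 */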
536 static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data)
537 {
538 	u32 state;
539 	int ret;
540 	struct platform_device *pdev = dp->dp_display.pdev;
541 
542 	mutex_lock(&dp->event_mutex);
543 
544 	state =  dp->hpd_state;
545 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
546 			dp->dp_display.connector_type, state);
547 
548 	if (state == ST_DISPLAY_OFF) {
549 		mutex_unlock(&dp->event_mutex);
550 		return 0;
551 	}
552 
553 	if (state == ST_MAINLINK_READY || state == ST_CONNECTED) {
554 		mutex_unlock(&dp->event_mutex);
555 		return 0;
556 	}
557 
558 	if (state == ST_DISCONNECT_PENDING) {
559 		/* wait until ST_DISCONNECTED */
560 		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */
561 		mutex_unlock(&dp->event_mutex);
562 		return 0;
563 	}
564 
565 	ret = pm_runtime_resume_and_get(&pdev->dev);
566 	if (ret) {
567 		DRM_ERROR("failed to pm_runtime_resume\n");
568 		mutex_unlock(&dp->event_mutex);
569 		return ret;
570 	}
571 
572 	ret = dp_display_usbpd_configure_cb(&pdev->dev);
573 	if (ret) {	/* link train failed */
574 		dp->hpd_state = ST_DISCONNECTED;
575 	} else {
576 		dp->hpd_state = ST_MAINLINK_READY;
577 	}
578 
579 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
580 			dp->dp_display.connector_type, state);
581 	mutex_unlock(&dp->event_mutex);
582 
583 	/* uevent will complete connection part */
584 	return 0;
585 }
586 
587 static void dp_display_handle_plugged_change(struct msm_dp *dp_display,
588 		bool plugged)
589 {
590 	struct dp_display_private *dp;
591 
592 	dp = container_of(dp_display,
593 			struct dp_display_private, dp_display);
594 
595 	/* notify audio subsystem only if sink supports audio */
596 	if (dp_display->plugged_cb && dp_display->codec_dev &&
597 			dp->audio_supported)
598 		dp_display->plugged_cb(dp_display->codec_dev, plugged);
599 }
600 
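/*
 * EV_HPD_UNPLUG_INT handler.  Cancels any pending irq_hpd events, notifies
 * userspace and the audio codec of the disconnect and, on the normal path,
 * drops the runtime PM reference taken in dp_hpd_plug_handle().
 */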
601 static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data)
602 {
603 	u32 state;
604 	struct platform_device *pdev = dp->dp_display.pdev;
605 
606 	mutex_lock(&dp->event_mutex);
607 
608 	state = dp->hpd_state;
609 
610 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
611 			dp->dp_display.connector_type, state);
612 
613 	/* unplugged, no more irq_hpd handling */
614 	dp_del_event(dp, EV_IRQ_HPD_INT);
615 
616 	if (state == ST_DISCONNECTED) {
617 		/* triggered by irq_hpd with sink_count = 0 */
618 		if (dp->link->sink_count == 0) {
619 			dp_display_host_phy_exit(dp);
620 		}
621 		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
622 		mutex_unlock(&dp->event_mutex);
623 		return 0;
624 	} else if (state == ST_DISCONNECT_PENDING) {
625 		mutex_unlock(&dp->event_mutex);
626 		return 0;
627 	} else if (state == ST_MAINLINK_READY) {
628 		dp_ctrl_off_link(dp->ctrl);
629 		dp_display_host_phy_exit(dp);
630 		dp->hpd_state = ST_DISCONNECTED;
631 		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
632 		mutex_unlock(&dp->event_mutex);
633 		return 0;
634 	}
635 
636 	/*
637 	 * We don't need separate work for disconnect as
638 	 * connect/attention interrupts are disabled
639 	 */
640 	dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
641 
642 	if (state == ST_DISPLAY_OFF) {
643 		dp->hpd_state = ST_DISCONNECTED;
644 	} else {
645 		dp->hpd_state = ST_DISCONNECT_PENDING;
646 	}
647 
648 	/* signal the disconnect event early to ensure proper teardown */
649 	dp_display_handle_plugged_change(&dp->dp_display, false);
650 
651 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
652 			dp->dp_display.connector_type, state);
653 
654 	/* uevent will complete disconnection part */
655 	pm_runtime_put_sync(&pdev->dev);
656 	mutex_unlock(&dp->event_mutex);
657 	return 0;
658 }
659 
660 static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data)
661 {
662 	u32 state;
663 
664 	mutex_lock(&dp->event_mutex);
665 
666 	/* irq_hpd can happen at either connected or disconnected state */
667 	state =  dp->hpd_state;
668 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
669 			dp->dp_display.connector_type, state);
670 
671 	if (state == ST_DISPLAY_OFF) {
672 		mutex_unlock(&dp->event_mutex);
673 		return 0;
674 	}
675 
676 	if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) {
677 		/* wait until ST_CONNECTED */
678 		dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */
679 		mutex_unlock(&dp->event_mutex);
680 		return 0;
681 	}
682 
683 	dp_display_usbpd_attention_cb(&dp->dp_display.pdev->dev);
684 
685 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
686 			dp->dp_display.connector_type, state);
687 
688 	mutex_unlock(&dp->event_mutex);
689 
690 	return 0;
691 }
692 
693 static void dp_display_deinit_sub_modules(struct dp_display_private *dp)
694 {
695 	dp_audio_put(dp->audio);
696 	dp_panel_put(dp->panel);
697 	dp_aux_put(dp->aux);
698 }
699 
700 static int dp_init_sub_modules(struct dp_display_private *dp)
701 {
702 	int rc = 0;
703 	struct device *dev = &dp->dp_display.pdev->dev;
704 	struct dp_panel_in panel_in = {
705 		.dev = dev,
706 	};
707 	struct phy *phy;
708 
709 	phy = devm_phy_get(dev, "dp");
710 	if (IS_ERR(phy))
711 		return PTR_ERR(phy);
712 
713 	dp->catalog = dp_catalog_get(dev);
714 	if (IS_ERR(dp->catalog)) {
715 		rc = PTR_ERR(dp->catalog);
716 		DRM_ERROR("failed to initialize catalog, rc = %d\n", rc);
717 		dp->catalog = NULL;
718 		goto error;
719 	}
720 
721 	dp->aux = dp_aux_get(dev, dp->catalog,
722 			     phy,
723 			     dp->dp_display.is_edp);
724 	if (IS_ERR(dp->aux)) {
725 		rc = PTR_ERR(dp->aux);
726 		DRM_ERROR("failed to initialize aux, rc = %d\n", rc);
727 		dp->aux = NULL;
728 		goto error;
729 	}
730 
731 	dp->link = dp_link_get(dev, dp->aux);
732 	if (IS_ERR(dp->link)) {
733 		rc = PTR_ERR(dp->link);
734 		DRM_ERROR("failed to initialize link, rc = %d\n", rc);
735 		dp->link = NULL;
736 		goto error_link;
737 	}
738 
739 	panel_in.aux = dp->aux;
740 	panel_in.catalog = dp->catalog;
741 	panel_in.link = dp->link;
742 
743 	dp->panel = dp_panel_get(&panel_in);
744 	if (IS_ERR(dp->panel)) {
745 		rc = PTR_ERR(dp->panel);
746 		DRM_ERROR("failed to initialize panel, rc = %d\n", rc);
747 		dp->panel = NULL;
748 		goto error_link;
749 	}
750 
751 	dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux,
752 			       dp->catalog,
753 			       phy);
754 	if (IS_ERR(dp->ctrl)) {
755 		rc = PTR_ERR(dp->ctrl);
756 		DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc);
757 		dp->ctrl = NULL;
758 		goto error_ctrl;
759 	}
760 
761 	dp->audio = dp_audio_get(dp->dp_display.pdev, dp->panel, dp->catalog);
762 	if (IS_ERR(dp->audio)) {
763 		rc = PTR_ERR(dp->audio);
764 		pr_err("failed to initialize audio, rc = %d\n", rc);
765 		dp->audio = NULL;
766 		goto error_ctrl;
767 	}
768 
769 	return rc;
770 
771 error_ctrl:
772 	dp_panel_put(dp->panel);
773 error_link:
774 	dp_aux_put(dp->aux);
775 error:
776 	return rc;
777 }
778 
779 static int dp_display_set_mode(struct msm_dp *dp_display,
780 			       struct dp_display_mode *mode)
781 {
782 	struct dp_display_private *dp;
783 
784 	dp = container_of(dp_display, struct dp_display_private, dp_display);
785 
786 	drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode);
787 	dp->panel->dp_mode.bpp = mode->bpp;
788 	dp->panel->dp_mode.capabilities = mode->capabilities;
789 	dp->panel->dp_mode.out_fmt_is_yuv_420 = mode->out_fmt_is_yuv_420;
790 	dp_panel_init_panel_info(dp->panel);
791 	return 0;
792 }
793 
794 static int dp_display_enable(struct dp_display_private *dp, bool force_link_train)
795 {
796 	int rc = 0;
797 	struct msm_dp *dp_display = &dp->dp_display;
798 
799 	drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count);
800 	if (dp_display->power_on) {
801 		drm_dbg_dp(dp->drm_dev, "Link already setup, return\n");
802 		return 0;
803 	}
804 
805 	rc = dp_ctrl_on_stream(dp->ctrl, force_link_train);
806 	if (!rc)
807 		dp_display->power_on = true;
808 
809 	return rc;
810 }
811 
812 static int dp_display_post_enable(struct msm_dp *dp_display)
813 {
814 	struct dp_display_private *dp;
815 	u32 rate;
816 
817 	dp = container_of(dp_display, struct dp_display_private, dp_display);
818 
819 	rate = dp->link->link_params.rate;
820 
821 	if (dp->audio_supported) {
822 		dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate);
823 		dp->audio->lane_count = dp->link->link_params.num_lanes;
824 	}
825 
826 	/* signal the connect event late to synchronize video and display */
827 	dp_display_handle_plugged_change(dp_display, true);
828 
829 	if (dp_display->psr_supported)
830 		dp_ctrl_config_psr(dp->ctrl);
831 
832 	return 0;
833 }
834 
835 static int dp_display_disable(struct dp_display_private *dp)
836 {
837 	struct msm_dp *dp_display = &dp->dp_display;
838 
839 	if (!dp_display->power_on)
840 		return 0;
841 
842 	/* wait only if audio was enabled */
843 	if (dp_display->audio_enabled) {
844 		/* signal the disconnect event */
845 		dp_display_handle_plugged_change(dp_display, false);
846 		if (!wait_for_completion_timeout(&dp->audio_comp,
847 				HZ * 5))
848 			DRM_ERROR("audio comp timeout\n");
849 	}
850 
851 	dp_display->audio_enabled = false;
852 
853 	if (dp->link->sink_count == 0) {
854 		/*
855 		 * irq_hpd with sink_count = 0
856 		 * HDMI cable unplugged from the dongle
857 		 */
858 		dp_ctrl_off_link_stream(dp->ctrl);
859 	} else {
860 		/*
861 		 * unplugged interrupt
862 		 * dongle unplugged from the DUT
863 		 */
864 		dp_ctrl_off(dp->ctrl);
865 		dp_display_host_phy_exit(dp);
866 	}
867 
868 	dp_display->power_on = false;
869 
870 	drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count);
871 	return 0;
872 }
873 
874 int dp_display_set_plugged_cb(struct msm_dp *dp_display,
875 		hdmi_codec_plugged_cb fn, struct device *codec_dev)
876 {
877 	bool plugged;
878 
879 	dp_display->plugged_cb = fn;
880 	dp_display->codec_dev = codec_dev;
881 	plugged = dp_display->link_ready;
882 	dp_display_handle_plugged_change(dp_display, plugged);
883 
884 	return 0;
885 }
886 
887 /**
888  * dp_bridge_mode_valid - callback to determine if specified mode is valid
889  * @bridge: Pointer to drm bridge structure
890  * @info: display info
891  * @mode: Pointer to drm mode structure
892  * Returns: Validity status for specified mode
893  */
894 enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge,
895 					  const struct drm_display_info *info,
896 					  const struct drm_display_mode *mode)
897 {
898 	const u32 num_components = 3, default_bpp = 24;
899 	struct dp_display_private *dp_display;
900 	struct dp_link_info *link_info;
901 	u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0;
902 	struct msm_dp *dp;
903 	int mode_pclk_khz = mode->clock;
904 
905 	dp = to_dp_bridge(bridge)->dp_display;
906 
907 	if (!dp || !mode_pclk_khz || !dp->connector) {
908 		DRM_ERROR("invalid params\n");
909 		return -EINVAL;
910 	}
911 
912 	if (mode->clock > DP_MAX_PIXEL_CLK_KHZ)
913 		return MODE_CLOCK_HIGH;
914 
915 	dp_display = container_of(dp, struct dp_display_private, dp_display);
916 	link_info = &dp_display->panel->link_info;
917 
918 	if (drm_mode_is_420_only(&dp->connector->display_info, mode) &&
919 	    dp_display->panel->vsc_sdp_supported)
920 		mode_pclk_khz /= 2;
921 
922 	mode_bpp = dp->connector->display_info.bpc * num_components;
923 	if (!mode_bpp)
924 		mode_bpp = default_bpp;
925 
926 	mode_bpp = dp_panel_get_mode_bpp(dp_display->panel,
927 			mode_bpp, mode_pclk_khz);
928 
929 	mode_rate_khz = mode_pclk_khz * mode_bpp;
930 	supported_rate_khz = link_info->num_lanes * link_info->rate * 8;
931 
932 	if (mode_rate_khz > supported_rate_khz)
933 		return MODE_BAD;
934 
935 	return MODE_OK;
936 }
937 
938 int dp_display_get_modes(struct msm_dp *dp)
939 {
940 	struct dp_display_private *dp_display;
941 
942 	if (!dp) {
943 		DRM_ERROR("invalid params\n");
944 		return 0;
945 	}
946 
947 	dp_display = container_of(dp, struct dp_display_private, dp_display);
948 
949 	return dp_panel_get_modes(dp_display->panel,
950 		dp->connector);
951 }
952 
953 bool dp_display_check_video_test(struct msm_dp *dp)
954 {
955 	struct dp_display_private *dp_display;
956 
957 	dp_display = container_of(dp, struct dp_display_private, dp_display);
958 
959 	return dp_display->panel->video_test;
960 }
961 
962 int dp_display_get_test_bpp(struct msm_dp *dp)
963 {
964 	struct dp_display_private *dp_display;
965 
966 	if (!dp) {
967 		DRM_ERROR("invalid params\n");
968 		return 0;
969 	}
970 
971 	dp_display = container_of(dp, struct dp_display_private, dp_display);
972 
973 	return dp_link_bit_depth_to_bpp(
974 		dp_display->link->test_video.test_bit_depth);
975 }
976 
977 void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp)
978 {
979 	struct dp_display_private *dp_display;
980 
981 	dp_display = container_of(dp, struct dp_display_private, dp_display);
982 
983 	/*
984 	 * if we are reading registers we need the link clocks to be on
985 	 * however till DP cable is connected this will not happen as we
986 	 * do not know the resolution to power up with. Hence check the
987 	 * power_on status before dumping DP registers to avoid crash due
988 	 * to unclocked access
989 	 */
990 	mutex_lock(&dp_display->event_mutex);
991 
992 	if (!dp->power_on) {
993 		mutex_unlock(&dp_display->event_mutex);
994 		return;
995 	}
996 
997 	dp_catalog_snapshot(dp_display->catalog, disp_state);
998 
999 	mutex_unlock(&dp_display->event_mutex);
1000 }
1001 
1002 void dp_display_set_psr(struct msm_dp *dp_display, bool enter)
1003 {
1004 	struct dp_display_private *dp;
1005 
1006 	if (!dp_display) {
1007 		DRM_ERROR("invalid params\n");
1008 		return;
1009 	}
1010 
1011 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1012 	dp_ctrl_set_psr(dp->ctrl, enter);
1013 }
1014 
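/*
 * Main loop of the hpd event thread.  Events are normally handled as soon as
 * they are queued; an event carrying a non-zero delay is pushed back with the
 * delay decremented and the thread polls the queue every EVENT_TIMEOUT
 * jiffies until that delay reaches zero.
 */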
1015 static int hpd_event_thread(void *data)
1016 {
1017 	struct dp_display_private *dp_priv;
1018 	unsigned long flag;
1019 	struct dp_event *todo;
1020 	int timeout_mode = 0;
1021 
1022 	dp_priv = (struct dp_display_private *)data;
1023 
1024 	while (1) {
1025 		if (timeout_mode) {
1026 			wait_event_timeout(dp_priv->event_q,
1027 				(dp_priv->event_pndx == dp_priv->event_gndx) ||
1028 					kthread_should_stop(), EVENT_TIMEOUT);
1029 		} else {
1030 			wait_event_interruptible(dp_priv->event_q,
1031 				(dp_priv->event_pndx != dp_priv->event_gndx) ||
1032 					kthread_should_stop());
1033 		}
1034 
1035 		if (kthread_should_stop())
1036 			break;
1037 
1038 		spin_lock_irqsave(&dp_priv->event_lock, flag);
1039 		todo = &dp_priv->event_list[dp_priv->event_gndx];
1040 		if (todo->delay) {
1041 			struct dp_event *todo_next;
1042 
1043 			dp_priv->event_gndx++;
1044 			dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1045 
1046 			/* re-queue the delayed event */
1047 			todo_next = &dp_priv->event_list[dp_priv->event_pndx++];
1048 			dp_priv->event_pndx %= DP_EVENT_Q_MAX;
1049 			todo_next->event_id = todo->event_id;
1050 			todo_next->data = todo->data;
1051 			todo_next->delay = todo->delay - 1;
1052 
1053 			/* clean up older event */
1054 			todo->event_id = EV_NO_EVENT;
1055 			todo->delay = 0;
1056 
1057 			/* switch to timeout mode */
1058 			timeout_mode = 1;
1059 			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1060 			continue;
1061 		}
1062 
1063 		/* timeout with no events in q */
1064 		if (dp_priv->event_pndx == dp_priv->event_gndx) {
1065 			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1066 			continue;
1067 		}
1068 
1069 		dp_priv->event_gndx++;
1070 		dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1071 		timeout_mode = 0;
1072 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1073 
1074 		switch (todo->event_id) {
1075 		case EV_HPD_PLUG_INT:
1076 			dp_hpd_plug_handle(dp_priv, todo->data);
1077 			break;
1078 		case EV_HPD_UNPLUG_INT:
1079 			dp_hpd_unplug_handle(dp_priv, todo->data);
1080 			break;
1081 		case EV_IRQ_HPD_INT:
1082 			dp_irq_hpd_handle(dp_priv, todo->data);
1083 			break;
1084 		case EV_USER_NOTIFICATION:
1085 			dp_display_send_hpd_notification(dp_priv,
1086 						todo->data);
1087 			break;
1088 		default:
1089 			break;
1090 		}
1091 	}
1092 
1093 	return 0;
1094 }
1095 
1096 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv)
1097 {
1098 	/* set event q to empty */
1099 	dp_priv->event_gndx = 0;
1100 	dp_priv->event_pndx = 0;
1101 
1102 	dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler");
1103 	if (IS_ERR(dp_priv->ev_tsk))
1104 		return PTR_ERR(dp_priv->ev_tsk);
1105 
1106 	return 0;
1107 }
1108 
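/*
 * Top-level interrupt handler: HPD summary interrupts are translated into
 * events for the hpd event thread (a REPLUG becomes an immediate unplug plus
 * a plug event delayed by three ticks so teardown can finish first), then the
 * controller and AUX interrupt handlers are chained in.
 */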
1109 static irqreturn_t dp_display_irq_handler(int irq, void *dev_id)
1110 {
1111 	struct dp_display_private *dp = dev_id;
1112 	irqreturn_t ret = IRQ_NONE;
1113 	u32 hpd_isr_status;
1114 
1115 	if (!dp) {
1116 		DRM_ERROR("invalid data\n");
1117 		return IRQ_NONE;
1118 	}
1119 
1120 	hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog);
1121 
1122 	if (hpd_isr_status & 0x0F) {
1123 		drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n",
1124 			dp->dp_display.connector_type, hpd_isr_status);
1125 		/* hpd related interrupts */
1126 		if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK)
1127 			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1128 
1129 		if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) {
1130 			dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0);
1131 		}
1132 
1133 		if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) {
1134 			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1135 			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3);
1136 		}
1137 
1138 		if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK)
1139 			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1140 
1141 		ret = IRQ_HANDLED;
1142 	}
1143 
1144 	/* DP controller isr */
1145 	ret |= dp_ctrl_isr(dp->ctrl);
1146 
1147 	/* DP aux isr */
1148 	ret |= dp_aux_isr(dp->aux);
1149 
1150 	return ret;
1151 }
1152 
1153 static int dp_display_request_irq(struct dp_display_private *dp)
1154 {
1155 	int rc = 0;
1156 	struct platform_device *pdev = dp->dp_display.pdev;
1157 
1158 	dp->irq = platform_get_irq(pdev, 0);
1159 	if (dp->irq < 0) {
1160 		DRM_ERROR("failed to get irq\n");
1161 		return dp->irq;
1162 	}
1163 
1164 	rc = devm_request_irq(&pdev->dev, dp->irq, dp_display_irq_handler,
1165 			      IRQF_TRIGGER_HIGH|IRQF_NO_AUTOEN,
1166 			      "dp_display_isr", dp);
1167 
1168 	if (rc < 0) {
1169 		DRM_ERROR("failed to request IRQ%u: %d\n",
1170 				dp->irq, rc);
1171 		return rc;
1172 	}
1173 
1174 	return 0;
1175 }
1176 
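/*
 * Pick the descriptor whose io_start matches this instance's register base.
 * The per-SoC tables above are zero-terminated, so the unusual loop condition
 * below simply stops at the all-zero sentinel entry.
 */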
1177 static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev)
1178 {
1179 	const struct msm_dp_desc *descs = of_device_get_match_data(&pdev->dev);
1180 	struct resource *res;
1181 	int i;
1182 
1183 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1184 	if (!res)
1185 		return NULL;
1186 
1187 	for (i = 0; i < descs[i].io_start; i++) {
1188 		if (descs[i].io_start == res->start)
1189 			return &descs[i];
1190 	}
1191 
1192 	dev_err(&pdev->dev, "unknown displayport instance\n");
1193 	return NULL;
1194 }
1195 
1196 static int dp_display_probe_tail(struct device *dev)
1197 {
1198 	struct msm_dp *dp = dev_get_drvdata(dev);
1199 	int ret;
1200 
1201 	/*
1202 	 * External bridges are mandatory for eDP interfaces: one has to
1203 	 * provide at least an eDP panel (which gets wrapped into panel-bridge).
1204 	 *
1205 	 * For DisplayPort interfaces external bridges are optional, so
1206 	 * silently ignore an error if one is not present (-ENODEV).
1207 	 */
1208 	dp->next_bridge = devm_drm_of_get_bridge(&dp->pdev->dev, dp->pdev->dev.of_node, 1, 0);
1209 	if (IS_ERR(dp->next_bridge)) {
1210 		ret = PTR_ERR(dp->next_bridge);
1211 		dp->next_bridge = NULL;
1212 		if (dp->is_edp || ret != -ENODEV)
1213 			return ret;
1214 	}
1215 
1216 	ret = component_add(dev, &dp_display_comp_ops);
1217 	if (ret)
1218 		DRM_ERROR("component add failed, rc=%d\n", ret);
1219 
1220 	return ret;
1221 }
1222 
1223 static int dp_auxbus_done_probe(struct drm_dp_aux *aux)
1224 {
1225 	return dp_display_probe_tail(aux->dev);
1226 }
1227 
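/*
 * Probe: for eDP the component is only added from dp_auxbus_done_probe(),
 * i.e. once the panel on the AUX bus has probed; for DP it is added right
 * away from dp_display_probe_tail().
 */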
1228 static int dp_display_probe(struct platform_device *pdev)
1229 {
1230 	int rc = 0;
1231 	struct dp_display_private *dp;
1232 	const struct msm_dp_desc *desc;
1233 
1234 	if (!pdev || !pdev->dev.of_node) {
1235 		DRM_ERROR("pdev not found\n");
1236 		return -ENODEV;
1237 	}
1238 
1239 	dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL);
1240 	if (!dp)
1241 		return -ENOMEM;
1242 
1243 	desc = dp_display_get_desc(pdev);
1244 	if (!desc)
1245 		return -EINVAL;
1246 
1247 	dp->dp_display.pdev = pdev;
1248 	dp->name = "drm_dp";
1249 	dp->id = desc->id;
1250 	dp->dp_display.connector_type = desc->connector_type;
1251 	dp->wide_bus_supported = desc->wide_bus_supported;
1252 	dp->dp_display.is_edp =
1253 		(dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP);
1254 
1255 	rc = dp_init_sub_modules(dp);
1256 	if (rc) {
1257 		DRM_ERROR("init sub module failed\n");
1258 		return -EPROBE_DEFER;
1259 	}
1260 
1261 	/* setup event q */
1262 	mutex_init(&dp->event_mutex);
1263 	init_waitqueue_head(&dp->event_q);
1264 	spin_lock_init(&dp->event_lock);
1265 
1266 	/* Store DP audio handle inside DP display */
1267 	dp->dp_display.dp_audio = dp->audio;
1268 
1269 	init_completion(&dp->audio_comp);
1270 
1271 	platform_set_drvdata(pdev, &dp->dp_display);
1272 
1273 	rc = devm_pm_runtime_enable(&pdev->dev);
1274 	if (rc)
1275 		goto err;
1276 
1277 	rc = dp_display_request_irq(dp);
1278 	if (rc)
1279 		goto err;
1280 
1281 	if (dp->dp_display.is_edp) {
1282 		rc = devm_of_dp_aux_populate_bus(dp->aux, dp_auxbus_done_probe);
1283 		if (rc) {
1284 			DRM_ERROR("eDP auxbus population failed, rc=%d\n", rc);
1285 			goto err;
1286 		}
1287 	} else {
1288 		rc = dp_display_probe_tail(&pdev->dev);
1289 		if (rc)
1290 			goto err;
1291 	}
1292 
1293 	return rc;
1294 
1295 err:
1296 	dp_display_deinit_sub_modules(dp);
1297 	return rc;
1298 }
1299 
1300 static void dp_display_remove(struct platform_device *pdev)
1301 {
1302 	struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev);
1303 
1304 	component_del(&pdev->dev, &dp_display_comp_ops);
1305 	dp_display_deinit_sub_modules(dp);
1306 	platform_set_drvdata(pdev, NULL);
1307 }
1308 
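/*
 * Runtime suspend mirrors dp_pm_runtime_resume() below: the interrupt is
 * masked and the host controller (plus, for eDP, the HPD block and PHY) is
 * shut down.
 */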
1309 static int dp_pm_runtime_suspend(struct device *dev)
1310 {
1311 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
1312 
1313 	disable_irq(dp->irq);
1314 
1315 	if (dp->dp_display.is_edp) {
1316 		dp_display_host_phy_exit(dp);
1317 		dp_catalog_ctrl_hpd_disable(dp->catalog);
1318 	}
1319 	dp_display_host_deinit(dp);
1320 
1321 	return 0;
1322 }
1323 
1324 static int dp_pm_runtime_resume(struct device *dev)
1325 {
1326 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
1327 
1328 	/*
1329 	 * for eDP, the host controller, HPD block and PHY are enabled here
1330 	 * but with HPD irq disabled
1331 	 *
1332 	 * for DP, only host controller is enabled here.
1333 	 * HPD block is enabled at dp_bridge_hpd_enable()
1334 	 * PHY will be enabled at plugin handler later
1335 	 */
1336 	dp_display_host_init(dp);
1337 	if (dp->dp_display.is_edp) {
1338 		dp_catalog_ctrl_hpd_enable(dp->catalog);
1339 		dp_display_host_phy_init(dp);
1340 	}
1341 
1342 	enable_irq(dp->irq);
1343 	return 0;
1344 }
1345 
1346 static const struct dev_pm_ops dp_pm_ops = {
1347 	SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL)
1348 	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
1349 				pm_runtime_force_resume)
1350 };
1351 
1352 static struct platform_driver dp_display_driver = {
1353 	.probe  = dp_display_probe,
1354 	.remove_new = dp_display_remove,
1355 	.driver = {
1356 		.name = "msm-dp-display",
1357 		.of_match_table = dp_dt_match,
1358 		.suppress_bind_attrs = true,
1359 		.pm = &dp_pm_ops,
1360 	},
1361 };
1362 
1363 int __init msm_dp_register(void)
1364 {
1365 	int ret;
1366 
1367 	ret = platform_driver_register(&dp_display_driver);
1368 	if (ret)
1369 		DRM_ERROR("DP display driver register failed\n");
1370 
1371 	return ret;
1372 }
1373 
1374 void __exit msm_dp_unregister(void)
1375 {
1376 	platform_driver_unregister(&dp_display_driver);
1377 }
1378 
1379 bool msm_dp_is_yuv_420_enabled(const struct msm_dp *dp_display,
1380 			       const struct drm_display_mode *mode)
1381 {
1382 	struct dp_display_private *dp;
1383 	const struct drm_display_info *info;
1384 
1385 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1386 	info = &dp_display->connector->display_info;
1387 
1388 	return dp->panel->vsc_sdp_supported && drm_mode_is_420_only(info, mode);
1389 }
1390 
1391 bool msm_dp_needs_periph_flush(const struct msm_dp *dp_display,
1392 			       const struct drm_display_mode *mode)
1393 {
1394 	return msm_dp_is_yuv_420_enabled(dp_display, mode);
1395 }
1396 
1397 bool msm_dp_wide_bus_available(const struct msm_dp *dp_display)
1398 {
1399 	struct dp_display_private *dp;
1400 
1401 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1402 
1403 	if (dp->dp_mode.out_fmt_is_yuv_420)
1404 		return false;
1405 
1406 	return dp->wide_bus_supported;
1407 }
1408 
1409 void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp)
1410 {
1411 	struct dp_display_private *dp;
1412 	struct device *dev;
1413 	int rc;
1414 
1415 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1416 	dev = &dp->dp_display.pdev->dev;
1417 
1418 	dp->debug = dp_debug_get(dev, dp->panel,
1419 					dp->link, dp->dp_display.connector,
1420 					root, is_edp);
1421 	if (IS_ERR(dp->debug)) {
1422 		rc = PTR_ERR(dp->debug);
1423 		DRM_ERROR("failed to initialize debug, rc = %d\n", rc);
1424 		dp->debug = NULL;
1425 	}
1426 }
1427 
1428 int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev,
1429 			struct drm_encoder *encoder, bool yuv_supported)
1430 {
1431 	struct dp_display_private *dp_priv;
1432 	int ret;
1433 
1434 	dp_display->drm_dev = dev;
1435 
1436 	dp_priv = container_of(dp_display, struct dp_display_private, dp_display);
1437 
1438 	ret = dp_bridge_init(dp_display, dev, encoder);
1439 	if (ret) {
1440 		DRM_DEV_ERROR(dev->dev,
1441 			"failed to create dp bridge: %d\n", ret);
1442 		return ret;
1443 	}
1444 
1445 	dp_display->connector = dp_drm_connector_init(dp_display, encoder, yuv_supported);
1446 	if (IS_ERR(dp_display->connector)) {
1447 		ret = PTR_ERR(dp_display->connector);
1448 		DRM_DEV_ERROR(dev->dev,
1449 			"failed to create dp connector: %d\n", ret);
1450 		dp_display->connector = NULL;
1451 		return ret;
1452 	}
1453 
1454 	dp_priv->panel->connector = dp_display->connector;
1455 
1456 	return 0;
1457 }
1458 
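/*
 * Bridge enable: for eDP the plug-in handling is driven from here since there
 * is no HPD event, and link training is forced when coming back from
 * ST_DISPLAY_OFF because the PHY has just been re-initialized.
 */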
1459 void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge,
1460 			     struct drm_bridge_state *old_bridge_state)
1461 {
1462 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1463 	struct msm_dp *dp = dp_bridge->dp_display;
1464 	int rc = 0;
1465 	struct dp_display_private *dp_display;
1466 	u32 state;
1467 	bool force_link_train = false;
1468 
1469 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1470 	if (!dp_display->dp_mode.drm_mode.clock) {
1471 		DRM_ERROR("invalid params\n");
1472 		return;
1473 	}
1474 
1475 	if (dp->is_edp)
1476 		dp_hpd_plug_handle(dp_display, 0);
1477 
1478 	mutex_lock(&dp_display->event_mutex);
1479 	if (pm_runtime_resume_and_get(&dp->pdev->dev)) {
1480 		DRM_ERROR("failed to pm_runtime_resume\n");
1481 		mutex_unlock(&dp_display->event_mutex);
1482 		return;
1483 	}
1484 
1485 	state = dp_display->hpd_state;
1486 	if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) {
1487 		mutex_unlock(&dp_display->event_mutex);
1488 		return;
1489 	}
1490 
1491 	rc = dp_display_set_mode(dp, &dp_display->dp_mode);
1492 	if (rc) {
1493 		DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc);
1494 		mutex_unlock(&dp_display->event_mutex);
1495 		return;
1496 	}
1497 
1498 	state =  dp_display->hpd_state;
1499 
1500 	if (state == ST_DISPLAY_OFF) {
1501 		dp_display_host_phy_init(dp_display);
1502 		force_link_train = true;
1503 	}
1504 
1505 	dp_display_enable(dp_display, force_link_train);
1506 
1507 	rc = dp_display_post_enable(dp);
1508 	if (rc) {
1509 		DRM_ERROR("DP display post enable failed, rc=%d\n", rc);
1510 		dp_display_disable(dp_display);
1511 	}
1512 
1513 	/* completed connection */
1514 	dp_display->hpd_state = ST_CONNECTED;
1515 
1516 	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1517 	mutex_unlock(&dp_display->event_mutex);
1518 }
1519 
1520 void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge,
1521 			      struct drm_bridge_state *old_bridge_state)
1522 {
1523 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1524 	struct msm_dp *dp = dp_bridge->dp_display;
1525 	struct dp_display_private *dp_display;
1526 
1527 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1528 
1529 	dp_ctrl_push_idle(dp_display->ctrl);
1530 }
1531 
1532 void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge,
1533 				   struct drm_bridge_state *old_bridge_state)
1534 {
1535 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1536 	struct msm_dp *dp = dp_bridge->dp_display;
1537 	u32 state;
1538 	struct dp_display_private *dp_display;
1539 
1540 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1541 
1542 	if (dp->is_edp)
1543 		dp_hpd_unplug_handle(dp_display, 0);
1544 
1545 	mutex_lock(&dp_display->event_mutex);
1546 
1547 	state = dp_display->hpd_state;
1548 	if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED)
1549 		drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n",
1550 			   dp->connector_type, state);
1551 
1552 	dp_display_disable(dp_display);
1553 
1554 	state =  dp_display->hpd_state;
1555 	if (state == ST_DISCONNECT_PENDING) {
1556 		/* completed disconnection */
1557 		dp_display->hpd_state = ST_DISCONNECTED;
1558 	} else {
1559 		dp_display->hpd_state = ST_DISPLAY_OFF;
1560 	}
1561 
1562 	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1563 
1564 	pm_runtime_put_sync(&dp->pdev->dev);
1565 	mutex_unlock(&dp_display->event_mutex);
1566 }
1567 
1568 void dp_bridge_mode_set(struct drm_bridge *drm_bridge,
1569 			const struct drm_display_mode *mode,
1570 			const struct drm_display_mode *adjusted_mode)
1571 {
1572 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1573 	struct msm_dp *dp = dp_bridge->dp_display;
1574 	struct dp_display_private *dp_display;
1575 	struct dp_panel *dp_panel;
1576 
1577 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1578 	dp_panel = dp_display->panel;
1579 
1580 	memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode));
1581 
1582 	if (dp_display_check_video_test(dp))
1583 		dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp);
1584 	else /* Default num_components per pixel = 3 */
1585 		dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3;
1586 
1587 	if (!dp_display->dp_mode.bpp)
1588 		dp_display->dp_mode.bpp = 24; /* Default bpp */
1589 
1590 	drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode);
1591 
1592 	dp_display->dp_mode.v_active_low =
1593 		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC);
1594 
1595 	dp_display->dp_mode.h_active_low =
1596 		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC);
1597 
1598 	dp_display->dp_mode.out_fmt_is_yuv_420 =
1599 		drm_mode_is_420_only(&dp->connector->display_info, adjusted_mode) &&
1600 		dp_panel->vsc_sdp_supported;
1601 
1602 	/* populate wide_bus_support to different layers */
1603 	dp_display->ctrl->wide_bus_en =
1604 		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1605 	dp_display->catalog->wide_bus_en =
1606 		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1607 }
1608 
1609 void dp_bridge_hpd_enable(struct drm_bridge *bridge)
1610 {
1611 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1612 	struct msm_dp *dp_display = dp_bridge->dp_display;
1613 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1614 
1615 	/*
1616 	 * This is the external DP case with HPD interrupts enabled:
1617 	 * step-1: dp_pm_runtime_resume() enables the DP host only
1618 	 * step-2: enable the HPD block and HPD interrupts here
1619 	 * step-3: wait for the plug-in interrupt while the PHY is still
1620 	 *         uninitialized
1621 	 * step-4: the DP PHY is initialized in the plug-in handler before link training
1622 	 */
1623 	mutex_lock(&dp->event_mutex);
1624 	if (pm_runtime_resume_and_get(&dp_display->pdev->dev)) {
1625 		DRM_ERROR("failed to resume power\n");
1626 		mutex_unlock(&dp->event_mutex);
1627 		return;
1628 	}
1629 
1630 	dp_catalog_ctrl_hpd_enable(dp->catalog);
1631 
1632 	/* enable HPD interrupts */
1633 	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, true);
1634 
1635 	dp_display->internal_hpd = true;
1636 	mutex_unlock(&dp->event_mutex);
1637 }
1638 
1639 void dp_bridge_hpd_disable(struct drm_bridge *bridge)
1640 {
1641 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1642 	struct msm_dp *dp_display = dp_bridge->dp_display;
1643 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1644 
1645 	mutex_lock(&dp->event_mutex);
1646 	/* disable HPD interrupts */
1647 	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false);
1648 	dp_catalog_ctrl_hpd_disable(dp->catalog);
1649 
1650 	dp_display->internal_hpd = false;
1651 
1652 	pm_runtime_put_sync(&dp_display->pdev->dev);
1653 	mutex_unlock(&dp->event_mutex);
1654 }
1655 
1656 void dp_bridge_hpd_notify(struct drm_bridge *bridge,
1657 			  enum drm_connector_status status)
1658 {
1659 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1660 	struct msm_dp *dp_display = dp_bridge->dp_display;
1661 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1662 
1663 	/* With internal HPD, interrupts are handled by the DP core directly */
1664 	if (dp_display->internal_hpd)
1665 		return;
1666 
1667 	if (!dp_display->link_ready && status == connector_status_connected)
1668 		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1669 	else if (dp_display->link_ready && status == connector_status_disconnected)
1670 		dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1671 }
1672