1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
4  */
5 
6 #include <linux/module.h>
7 #include <linux/slab.h>
8 #include <linux/uaccess.h>
9 #include <linux/debugfs.h>
10 #include <linux/component.h>
11 #include <linux/of_irq.h>
12 #include <linux/phy/phy.h>
13 #include <linux/delay.h>
14 #include <drm/display/drm_dp_aux_bus.h>
15 #include <drm/drm_edid.h>
16 
17 #include "msm_drv.h"
18 #include "msm_kms.h"
19 #include "dp_ctrl.h"
20 #include "dp_catalog.h"
21 #include "dp_aux.h"
22 #include "dp_reg.h"
23 #include "dp_link.h"
24 #include "dp_panel.h"
25 #include "dp_display.h"
26 #include "dp_drm.h"
27 #include "dp_audio.h"
28 #include "dp_debug.h"
29 
30 static bool psr_enabled = false;
31 module_param(psr_enabled, bool, 0);
32 MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays");
33 
34 #define HPD_STRING_SIZE 30
35 
36 enum {
37 	ISR_DISCONNECTED,
38 	ISR_CONNECT_PENDING,
39 	ISR_CONNECTED,
40 	ISR_HPD_REPLUG_COUNT,
41 	ISR_IRQ_HPD_PULSE_COUNT,
42 	ISR_HPD_LO_GLITH_COUNT,
43 };
44 
45 /* event thread connection state */
46 enum {
47 	ST_DISCONNECTED,
48 	ST_MAINLINK_READY,
49 	ST_CONNECTED,
50 	ST_DISCONNECT_PENDING,
51 	ST_DISPLAY_OFF,
52 };
53 
54 enum {
55 	EV_NO_EVENT,
56 	/* hpd events */
57 	EV_HPD_PLUG_INT,
58 	EV_IRQ_HPD_INT,
59 	EV_HPD_UNPLUG_INT,
60 	EV_USER_NOTIFICATION,
61 };
62 
63 #define EVENT_TIMEOUT	(HZ/10)	/* 100ms */
64 #define DP_EVENT_Q_MAX	8
65 
66 #define DP_TIMEOUT_NONE		0
67 
68 #define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2)
69 
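/*
 * A queued HPD event. @delay is the number of remaining event-loop passes
 * (one EVENT_TIMEOUT, i.e. roughly 100 ms, each) before the event is
 * handled; see hpd_event_thread().
 */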
70 struct dp_event {
71 	u32 event_id;
72 	u32 data;
73 	u32 delay;
74 };
75 
76 struct dp_display_private {
77 	int irq;
78 
79 	unsigned int id;
80 
81 	/* state variables */
82 	bool core_initialized;
83 	bool phy_initialized;
84 	bool audio_supported;
85 
86 	struct drm_device *drm_dev;
87 
88 	struct dp_catalog *catalog;
89 	struct drm_dp_aux *aux;
90 	struct dp_link    *link;
91 	struct dp_panel   *panel;
92 	struct dp_ctrl    *ctrl;
93 
94 	struct dp_display_mode dp_mode;
95 	struct msm_dp dp_display;
96 
97 	/* wait for audio signaling */
98 	struct completion audio_comp;
99 
100 	/* event-related fields, only accessed by the event thread */
101 	struct mutex event_mutex;
102 	wait_queue_head_t event_q;
103 	u32 hpd_state;
104 	u32 event_pndx;
105 	u32 event_gndx;
106 	struct task_struct *ev_tsk;
107 	struct dp_event event_list[DP_EVENT_Q_MAX];
108 	spinlock_t event_lock;
109 
110 	bool wide_bus_supported;
111 
112 	struct dp_audio *audio;
113 };
114 
115 struct msm_dp_desc {
116 	phys_addr_t io_start;
117 	unsigned int id;
118 	bool wide_bus_supported;
119 };
120 
121 static const struct msm_dp_desc sc7180_dp_descs[] = {
122 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
123 	{}
124 };
125 
126 static const struct msm_dp_desc sc7280_dp_descs[] = {
127 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
128 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
129 	{}
130 };
131 
132 static const struct msm_dp_desc sc8180x_dp_descs[] = {
133 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
134 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
135 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
136 	{}
137 };
138 
139 static const struct msm_dp_desc sc8280xp_dp_descs[] = {
140 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
141 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
142 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
143 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
144 	{ .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
145 	{ .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
146 	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
147 	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
148 	{}
149 };
150 
151 static const struct msm_dp_desc sm8650_dp_descs[] = {
152 	{ .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
153 	{}
154 };
155 
156 static const struct msm_dp_desc x1e80100_dp_descs[] = {
157 	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .wide_bus_supported = true },
158 	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .wide_bus_supported = true },
159 	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .wide_bus_supported = true },
160 	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .wide_bus_supported = true },
161 	{}
162 };
163 
164 static const struct of_device_id dp_dt_match[] = {
165 	{ .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_descs },
166 	{ .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_descs },
167 	{ .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_descs },
168 	{ .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_descs },
169 	{ .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_descs },
170 	{ .compatible = "qcom,sc8280xp-dp", .data = &sc8280xp_dp_descs },
171 	{ .compatible = "qcom,sc8280xp-edp", .data = &sc8280xp_dp_descs },
172 	{ .compatible = "qcom,sdm845-dp", .data = &sc7180_dp_descs },
173 	{ .compatible = "qcom,sm8350-dp", .data = &sc7180_dp_descs },
174 	{ .compatible = "qcom,sm8650-dp", .data = &sm8650_dp_descs },
175 	{ .compatible = "qcom,x1e80100-dp", .data = &x1e80100_dp_descs },
176 	{}
177 };
178 
179 static struct dp_display_private *dev_get_dp_display_private(struct device *dev)
180 {
181 	struct msm_dp *dp = dev_get_drvdata(dev);
182 
183 	return container_of(dp, struct dp_display_private, dp_display);
184 }
185 
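/*
 * Producer side of the HPD event ring: event_pndx is the producer index and
 * event_gndx is the consumer index owned by hpd_event_thread(). The queue is
 * treated as full when advancing the producer index would reach the consumer.
 */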
186 static int dp_add_event(struct dp_display_private *dp_priv, u32 event,
187 						u32 data, u32 delay)
188 {
189 	unsigned long flag;
190 	struct dp_event *todo;
191 	int pndx;
192 
193 	spin_lock_irqsave(&dp_priv->event_lock, flag);
194 	pndx = dp_priv->event_pndx + 1;
195 	pndx %= DP_EVENT_Q_MAX;
196 	if (pndx == dp_priv->event_gndx) {
197 		pr_err("event_q is full: pndx=%d gndx=%d\n",
198 			dp_priv->event_pndx, dp_priv->event_gndx);
199 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
200 		return -EPERM;
201 	}
202 	todo = &dp_priv->event_list[dp_priv->event_pndx++];
203 	dp_priv->event_pndx %= DP_EVENT_Q_MAX;
204 	todo->event_id = event;
205 	todo->data = data;
206 	todo->delay = delay;
207 	wake_up(&dp_priv->event_q);
208 	spin_unlock_irqrestore(&dp_priv->event_lock, flag);
209 
210 	return 0;
211 }
212 
213 static int dp_del_event(struct dp_display_private *dp_priv, u32 event)
214 {
215 	unsigned long flag;
216 	struct dp_event *todo;
217 	u32	gndx;
218 
219 	spin_lock_irqsave(&dp_priv->event_lock, flag);
220 	if (dp_priv->event_pndx == dp_priv->event_gndx) {
221 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
222 		return -ENOENT;
223 	}
224 
225 	gndx = dp_priv->event_gndx;
226 	while (dp_priv->event_pndx != gndx) {
227 		todo = &dp_priv->event_list[gndx];
228 		if (todo->event_id == event) {
229 			todo->event_id = EV_NO_EVENT;	/* deleted */
230 			todo->delay = 0;
231 		}
232 		gndx++;
233 		gndx %= DP_EVENT_Q_MAX;
234 	}
235 	spin_unlock_irqrestore(&dp_priv->event_lock, flag);
236 
237 	return 0;
238 }
239 
240 void dp_display_signal_audio_start(struct msm_dp *dp_display)
241 {
242 	struct dp_display_private *dp;
243 
244 	dp = container_of(dp_display, struct dp_display_private, dp_display);
245 
246 	reinit_completion(&dp->audio_comp);
247 }
248 
249 void dp_display_signal_audio_complete(struct msm_dp *dp_display)
250 {
251 	struct dp_display_private *dp;
252 
253 	dp = container_of(dp_display, struct dp_display_private, dp_display);
254 
255 	complete_all(&dp->audio_comp);
256 }
257 
258 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv);
259 
260 static int dp_display_bind(struct device *dev, struct device *master,
261 			   void *data)
262 {
263 	int rc = 0;
264 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
265 	struct msm_drm_private *priv = dev_get_drvdata(master);
266 	struct drm_device *drm = priv->dev;
267 
268 	dp->dp_display.drm_dev = drm;
269 	priv->dp[dp->id] = &dp->dp_display;
270 
271 
272 
273 	dp->drm_dev = drm;
274 	dp->aux->drm_dev = drm;
275 	rc = dp_aux_register(dp->aux);
276 	if (rc) {
277 		DRM_ERROR("DRM DP AUX register failed\n");
278 		goto end;
279 	}
280 
281 
282 	rc = dp_register_audio_driver(dev, dp->audio);
283 	if (rc) {
284 		DRM_ERROR("DP audio registration failed\n");
285 		goto end;
286 	}
287 
288 	rc = dp_hpd_event_thread_start(dp);
289 	if (rc) {
290 		DRM_ERROR("Event thread create failed\n");
291 		goto end;
292 	}
293 
294 	return 0;
295 end:
296 	return rc;
297 }
298 
299 static void dp_display_unbind(struct device *dev, struct device *master,
300 			      void *data)
301 {
302 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
303 	struct msm_drm_private *priv = dev_get_drvdata(master);
304 
305 	kthread_stop(dp->ev_tsk);
306 
307 	of_dp_aux_depopulate_bus(dp->aux);
308 
309 	dp_unregister_audio_driver(dev, dp->audio);
310 	dp_aux_unregister(dp->aux);
311 	dp->drm_dev = NULL;
312 	dp->aux->drm_dev = NULL;
313 	priv->dp[dp->id] = NULL;
314 }
315 
316 static const struct component_ops dp_display_comp_ops = {
317 	.bind = dp_display_bind,
318 	.unbind = dp_display_unbind,
319 };
320 
321 static void dp_display_send_hpd_event(struct msm_dp *dp_display)
322 {
323 	struct dp_display_private *dp;
324 	struct drm_connector *connector;
325 
326 	dp = container_of(dp_display, struct dp_display_private, dp_display);
327 
328 	connector = dp->dp_display.connector;
329 	drm_helper_hpd_irq_event(connector->dev);
330 }
331 
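/*
 * Runs on the HPD event thread (EV_USER_NOTIFICATION) and lets user space
 * know, via a hotplug uevent, that the link state changed.
 */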
332 static int dp_display_send_hpd_notification(struct dp_display_private *dp,
333 					    bool hpd)
334 {
335 	if ((hpd && dp->dp_display.link_ready) ||
336 			(!hpd && !dp->dp_display.link_ready)) {
337 		drm_dbg_dp(dp->drm_dev, "HPD already %s\n",
338 				(hpd ? "on" : "off"));
339 		return 0;
340 	}
341 
342 	/* reset video pattern flag on disconnect */
343 	if (!hpd) {
344 		dp->panel->video_test = false;
345 		if (!dp->dp_display.is_edp)
346 			drm_dp_set_subconnector_property(dp->dp_display.connector,
347 							 connector_status_disconnected,
348 							 dp->panel->dpcd,
349 							 dp->panel->downstream_ports);
350 	}
351 
352 	dp->dp_display.link_ready = hpd;
353 
354 	drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n",
355 			dp->dp_display.connector_type, hpd);
356 	dp_display_send_hpd_event(&dp->dp_display);
357 
358 	return 0;
359 }
360 
361 static int dp_display_process_hpd_high(struct dp_display_private *dp)
362 {
363 	struct drm_connector *connector = dp->dp_display.connector;
364 	const struct drm_display_info *info = &connector->display_info;
365 	int rc = 0;
366 
367 	rc = dp_panel_read_sink_caps(dp->panel, connector);
368 	if (rc)
369 		goto end;
370 
371 	dp_link_process_request(dp->link);
372 
373 	if (!dp->dp_display.is_edp)
374 		drm_dp_set_subconnector_property(connector,
375 						 connector_status_connected,
376 						 dp->panel->dpcd,
377 						 dp->panel->downstream_ports);
378 
379 	dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled;
380 
381 	dp->audio_supported = info->has_audio;
382 	dp_panel_handle_sink_request(dp->panel);
383 
384 	/*
385 	 * set sink to normal operation mode -- D0
386 	 * before dpcd read
387 	 */
388 	dp_link_psm_config(dp->link, &dp->panel->link_info, false);
389 
390 	dp_link_reset_phy_params_vx_px(dp->link);
391 	rc = dp_ctrl_on_link(dp->ctrl);
392 	if (rc) {
393 		DRM_ERROR("failed to complete DP link training\n");
394 		goto end;
395 	}
396 
397 	dp_add_event(dp, EV_USER_NOTIFICATION, true, 0);
398 
399 end:
400 	return rc;
401 }
402 
403 static void dp_display_host_phy_init(struct dp_display_private *dp)
404 {
405 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
406 		dp->dp_display.connector_type, dp->core_initialized,
407 		dp->phy_initialized);
408 
409 	if (!dp->phy_initialized) {
410 		dp_ctrl_phy_init(dp->ctrl);
411 		dp->phy_initialized = true;
412 	}
413 }
414 
415 static void dp_display_host_phy_exit(struct dp_display_private *dp)
416 {
417 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
418 		dp->dp_display.connector_type, dp->core_initialized,
419 		dp->phy_initialized);
420 
421 	if (dp->phy_initialized) {
422 		dp_ctrl_phy_exit(dp->ctrl);
423 		dp->phy_initialized = false;
424 	}
425 }
426 
427 static void dp_display_host_init(struct dp_display_private *dp)
428 {
429 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
430 		dp->dp_display.connector_type, dp->core_initialized,
431 		dp->phy_initialized);
432 
433 	dp_ctrl_core_clk_enable(dp->ctrl);
434 	dp_ctrl_reset_irq_ctrl(dp->ctrl, true);
435 	dp_aux_init(dp->aux);
436 	dp->core_initialized = true;
437 }
438 
439 static void dp_display_host_deinit(struct dp_display_private *dp)
440 {
441 	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
442 		dp->dp_display.connector_type, dp->core_initialized,
443 		dp->phy_initialized);
444 
445 	dp_ctrl_reset_irq_ctrl(dp->ctrl, false);
446 	dp_aux_deinit(dp->aux);
447 	dp_ctrl_core_clk_disable(dp->ctrl);
448 	dp->core_initialized = false;
449 }
450 
451 static int dp_display_usbpd_configure_cb(struct device *dev)
452 {
453 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
454 
455 	dp_display_host_phy_init(dp);
456 
457 	return dp_display_process_hpd_high(dp);
458 }
459 
460 static int dp_display_notify_disconnect(struct device *dev)
461 {
462 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
463 
464 	dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
465 
466 	return 0;
467 }
468 
469 static void dp_display_handle_video_request(struct dp_display_private *dp)
470 {
471 	if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) {
472 		dp->panel->video_test = true;
473 		dp_link_send_test_response(dp->link);
474 	}
475 }
476 
477 static int dp_display_handle_port_status_changed(struct dp_display_private *dp)
478 {
479 	int rc = 0;
480 
481 	if (drm_dp_is_branch(dp->panel->dpcd) && dp->link->sink_count == 0) {
482 		drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n");
483 		if (dp->hpd_state != ST_DISCONNECTED) {
484 			dp->hpd_state = ST_DISCONNECT_PENDING;
485 			dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
486 		}
487 	} else {
488 		if (dp->hpd_state == ST_DISCONNECTED) {
489 			dp->hpd_state = ST_MAINLINK_READY;
490 			rc = dp_display_process_hpd_high(dp);
491 			if (rc)
492 				dp->hpd_state = ST_DISCONNECTED;
493 		}
494 	}
495 
496 	return rc;
497 }
498 
499 static int dp_display_handle_irq_hpd(struct dp_display_private *dp)
500 {
501 	u32 sink_request = dp->link->sink_request;
502 
503 	drm_dbg_dp(dp->drm_dev, "%d\n", sink_request);
504 	if (dp->hpd_state == ST_DISCONNECTED) {
505 		if (sink_request & DP_LINK_STATUS_UPDATED) {
506 			drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n",
507 							sink_request);
508 			DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n");
509 			return -EINVAL;
510 		}
511 	}
512 
513 	dp_ctrl_handle_sink_request(dp->ctrl);
514 
515 	if (sink_request & DP_TEST_LINK_VIDEO_PATTERN)
516 		dp_display_handle_video_request(dp);
517 
518 	return 0;
519 }
520 
521 static int dp_display_usbpd_attention_cb(struct device *dev)
522 {
523 	int rc = 0;
524 	u32 sink_request;
525 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
526 
527 	/* check for any test request issued by sink */
528 	rc = dp_link_process_request(dp->link);
529 	if (!rc) {
530 		sink_request = dp->link->sink_request;
531 		drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n",
532 					dp->hpd_state, sink_request);
533 		if (sink_request & DS_PORT_STATUS_CHANGED)
534 			rc = dp_display_handle_port_status_changed(dp);
535 		else
536 			rc = dp_display_handle_irq_hpd(dp);
537 	}
538 
539 	return rc;
540 }
541 
542 static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data)
543 {
544 	u32 state;
545 	int ret;
546 	struct platform_device *pdev = dp->dp_display.pdev;
547 
548 	dp_aux_enable_xfers(dp->aux, true);
549 
550 	mutex_lock(&dp->event_mutex);
551 
552 	state =  dp->hpd_state;
553 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
554 			dp->dp_display.connector_type, state);
555 
556 	if (state == ST_DISPLAY_OFF) {
557 		mutex_unlock(&dp->event_mutex);
558 		return 0;
559 	}
560 
561 	if (state == ST_MAINLINK_READY || state == ST_CONNECTED) {
562 		mutex_unlock(&dp->event_mutex);
563 		return 0;
564 	}
565 
566 	if (state == ST_DISCONNECT_PENDING) {
567 		/* wait until ST_DISCONNECTED */
568 		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */
569 		mutex_unlock(&dp->event_mutex);
570 		return 0;
571 	}
572 
573 	ret = pm_runtime_resume_and_get(&pdev->dev);
574 	if (ret) {
575 		DRM_ERROR("failed to pm_runtime_resume\n");
576 		mutex_unlock(&dp->event_mutex);
577 		return ret;
578 	}
579 
580 	ret = dp_display_usbpd_configure_cb(&pdev->dev);
581 	if (ret) {	/* link train failed */
582 		dp->hpd_state = ST_DISCONNECTED;
583 		pm_runtime_put_sync(&pdev->dev);
584 	} else {
585 		dp->hpd_state = ST_MAINLINK_READY;
586 	}
587 
588 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
589 			dp->dp_display.connector_type, state);
590 	mutex_unlock(&dp->event_mutex);
591 
592 	/* uevent will complete connection part */
593 	return 0;
594 }
595 
596 static void dp_display_handle_plugged_change(struct msm_dp *dp_display,
597 		bool plugged)
598 {
599 	struct dp_display_private *dp;
600 
601 	dp = container_of(dp_display,
602 			struct dp_display_private, dp_display);
603 
604 	/* notify audio subsystem only if sink supports audio */
605 	if (dp_display->plugged_cb && dp_display->codec_dev &&
606 			dp->audio_supported)
607 		dp_display->plugged_cb(dp_display->codec_dev, plugged);
608 }
609 
610 static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data)
611 {
612 	u32 state;
613 	struct platform_device *pdev = dp->dp_display.pdev;
614 
615 	dp_aux_enable_xfers(dp->aux, false);
616 
617 	mutex_lock(&dp->event_mutex);
618 
619 	state = dp->hpd_state;
620 
621 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
622 			dp->dp_display.connector_type, state);
623 
624 	/* unplugged, no more irq_hpd handle */
625 	dp_del_event(dp, EV_IRQ_HPD_INT);
626 
627 	if (state == ST_DISCONNECTED) {
628 		/* triggered by irq_hpd with sink_count = 0 */
629 		if (dp->link->sink_count == 0) {
630 			dp_display_host_phy_exit(dp);
631 		}
632 		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
633 		mutex_unlock(&dp->event_mutex);
634 		return 0;
635 	} else if (state == ST_DISCONNECT_PENDING) {
636 		mutex_unlock(&dp->event_mutex);
637 		return 0;
638 	} else if (state == ST_MAINLINK_READY) {
639 		dp_ctrl_off_link(dp->ctrl);
640 		dp_display_host_phy_exit(dp);
641 		dp->hpd_state = ST_DISCONNECTED;
642 		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
643 		pm_runtime_put_sync(&pdev->dev);
644 		mutex_unlock(&dp->event_mutex);
645 		return 0;
646 	}
647 
648 	/*
649 	 * We don't need separate work for disconnect as
650 	 * connect/attention interrupts are disabled
651 	 */
652 	dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
653 
654 	if (state == ST_DISPLAY_OFF) {
655 		dp->hpd_state = ST_DISCONNECTED;
656 	} else {
657 		dp->hpd_state = ST_DISCONNECT_PENDING;
658 	}
659 
660 	/* signal the disconnect event early to ensure proper teardown */
661 	dp_display_handle_plugged_change(&dp->dp_display, false);
662 
663 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
664 			dp->dp_display.connector_type, state);
665 
666 	/* uevent will complete disconnection part */
667 	pm_runtime_put_sync(&pdev->dev);
668 	mutex_unlock(&dp->event_mutex);
669 	return 0;
670 }
671 
672 static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data)
673 {
674 	u32 state;
675 
676 	mutex_lock(&dp->event_mutex);
677 
678 	/* irq_hpd can happen at either connected or disconnected state */
679 	state =  dp->hpd_state;
680 	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
681 			dp->dp_display.connector_type, state);
682 
683 	if (state == ST_DISPLAY_OFF) {
684 		mutex_unlock(&dp->event_mutex);
685 		return 0;
686 	}
687 
688 	if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) {
689 		/* wait until ST_CONNECTED */
690 		dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */
691 		mutex_unlock(&dp->event_mutex);
692 		return 0;
693 	}
694 
695 	dp_display_usbpd_attention_cb(&dp->dp_display.pdev->dev);
696 
697 	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
698 			dp->dp_display.connector_type, state);
699 
700 	mutex_unlock(&dp->event_mutex);
701 
702 	return 0;
703 }
704 
705 static void dp_display_deinit_sub_modules(struct dp_display_private *dp)
706 {
707 	dp_audio_put(dp->audio);
708 	dp_panel_put(dp->panel);
709 	dp_aux_put(dp->aux);
710 }
711 
712 static int dp_init_sub_modules(struct dp_display_private *dp)
713 {
714 	int rc = 0;
715 	struct device *dev = &dp->dp_display.pdev->dev;
716 	struct dp_panel_in panel_in = {
717 		.dev = dev,
718 	};
719 	struct phy *phy;
720 
721 	phy = devm_phy_get(dev, "dp");
722 	if (IS_ERR(phy))
723 		return PTR_ERR(phy);
724 
725 	rc = phy_set_mode_ext(phy, PHY_MODE_DP,
726 			      dp->dp_display.is_edp ? PHY_SUBMODE_EDP : PHY_SUBMODE_DP);
727 	if (rc) {
728 		DRM_ERROR("failed to set phy submode, rc = %d\n", rc);
729 		dp->catalog = NULL;
730 		goto error;
731 	}
732 
733 	dp->catalog = dp_catalog_get(dev);
734 	if (IS_ERR(dp->catalog)) {
735 		rc = PTR_ERR(dp->catalog);
736 		DRM_ERROR("failed to initialize catalog, rc = %d\n", rc);
737 		dp->catalog = NULL;
738 		goto error;
739 	}
740 
741 	dp->aux = dp_aux_get(dev, dp->catalog,
742 			     phy,
743 			     dp->dp_display.is_edp);
744 	if (IS_ERR(dp->aux)) {
745 		rc = PTR_ERR(dp->aux);
746 		DRM_ERROR("failed to initialize aux, rc = %d\n", rc);
747 		dp->aux = NULL;
748 		goto error;
749 	}
750 
751 	dp->link = dp_link_get(dev, dp->aux);
752 	if (IS_ERR(dp->link)) {
753 		rc = PTR_ERR(dp->link);
754 		DRM_ERROR("failed to initialize link, rc = %d\n", rc);
755 		dp->link = NULL;
756 		goto error_link;
757 	}
758 
759 	panel_in.aux = dp->aux;
760 	panel_in.catalog = dp->catalog;
761 	panel_in.link = dp->link;
762 
763 	dp->panel = dp_panel_get(&panel_in);
764 	if (IS_ERR(dp->panel)) {
765 		rc = PTR_ERR(dp->panel);
766 		DRM_ERROR("failed to initialize panel, rc = %d\n", rc);
767 		dp->panel = NULL;
768 		goto error_link;
769 	}
770 
771 	dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux,
772 			       dp->catalog,
773 			       phy);
774 	if (IS_ERR(dp->ctrl)) {
775 		rc = PTR_ERR(dp->ctrl);
776 		DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc);
777 		dp->ctrl = NULL;
778 		goto error_ctrl;
779 	}
780 
781 	dp->audio = dp_audio_get(dp->dp_display.pdev, dp->panel, dp->catalog);
782 	if (IS_ERR(dp->audio)) {
783 		rc = PTR_ERR(dp->audio);
784 		pr_err("failed to initialize audio, rc = %d\n", rc);
785 		dp->audio = NULL;
786 		goto error_ctrl;
787 	}
788 
789 	return rc;
790 
791 error_ctrl:
792 	dp_panel_put(dp->panel);
793 error_link:
794 	dp_aux_put(dp->aux);
795 error:
796 	return rc;
797 }
798 
799 static int dp_display_set_mode(struct msm_dp *dp_display,
800 			       struct dp_display_mode *mode)
801 {
802 	struct dp_display_private *dp;
803 
804 	dp = container_of(dp_display, struct dp_display_private, dp_display);
805 
806 	drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode);
807 	dp->panel->dp_mode.bpp = mode->bpp;
808 	dp->panel->dp_mode.out_fmt_is_yuv_420 = mode->out_fmt_is_yuv_420;
809 	dp_panel_init_panel_info(dp->panel);
810 	return 0;
811 }
812 
813 static int dp_display_enable(struct dp_display_private *dp, bool force_link_train)
814 {
815 	int rc = 0;
816 	struct msm_dp *dp_display = &dp->dp_display;
817 
818 	drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count);
819 	if (dp_display->power_on) {
820 		drm_dbg_dp(dp->drm_dev, "Link already setup, return\n");
821 		return 0;
822 	}
823 
824 	rc = dp_ctrl_on_stream(dp->ctrl, force_link_train);
825 	if (!rc)
826 		dp_display->power_on = true;
827 
828 	return rc;
829 }
830 
831 static int dp_display_post_enable(struct msm_dp *dp_display)
832 {
833 	struct dp_display_private *dp;
834 	u32 rate;
835 
836 	dp = container_of(dp_display, struct dp_display_private, dp_display);
837 
838 	rate = dp->link->link_params.rate;
839 
840 	if (dp->audio_supported) {
841 		dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate);
842 		dp->audio->lane_count = dp->link->link_params.num_lanes;
843 	}
844 
845 	/* signal the connect event late to synchronize video and display */
846 	dp_display_handle_plugged_change(dp_display, true);
847 
848 	if (dp_display->psr_supported)
849 		dp_ctrl_config_psr(dp->ctrl);
850 
851 	return 0;
852 }
853 
854 static int dp_display_disable(struct dp_display_private *dp)
855 {
856 	struct msm_dp *dp_display = &dp->dp_display;
857 
858 	if (!dp_display->power_on)
859 		return 0;
860 
861 	/* wait only if audio was enabled */
862 	if (dp_display->audio_enabled) {
863 		/* signal the disconnect event */
864 		dp_display_handle_plugged_change(dp_display, false);
865 		if (!wait_for_completion_timeout(&dp->audio_comp,
866 				HZ * 5))
867 			DRM_ERROR("audio comp timeout\n");
868 	}
869 
870 	dp_display->audio_enabled = false;
871 
872 	if (dp->link->sink_count == 0) {
873 		/*
874 		 * irq_hpd with sink_count = 0
875 		 * HDMI cable unplugged from the dongle
876 		 */
877 		dp_ctrl_off_link_stream(dp->ctrl);
878 	} else {
879 		/*
880 		 * unplugged interrupt
881 		 * dongle unplugged out of DUT
882 		 */
883 		dp_ctrl_off(dp->ctrl);
884 		dp_display_host_phy_exit(dp);
885 	}
886 
887 	dp_display->power_on = false;
888 
889 	drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count);
890 	return 0;
891 }
892 
893 int dp_display_set_plugged_cb(struct msm_dp *dp_display,
894 		hdmi_codec_plugged_cb fn, struct device *codec_dev)
895 {
896 	bool plugged;
897 
898 	dp_display->plugged_cb = fn;
899 	dp_display->codec_dev = codec_dev;
900 	plugged = dp_display->link_ready;
901 	dp_display_handle_plugged_change(dp_display, plugged);
902 
903 	return 0;
904 }
905 
906 /**
907  * dp_bridge_mode_valid - callback to determine if specified mode is valid
908  * @bridge: Pointer to drm bridge structure
909  * @info: display info
910  * @mode: Pointer to drm mode structure
911  * Returns: Validity status for specified mode
912  */
913 enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge,
914 					  const struct drm_display_info *info,
915 					  const struct drm_display_mode *mode)
916 {
917 	const u32 num_components = 3, default_bpp = 24;
918 	struct dp_display_private *dp_display;
919 	struct dp_link_info *link_info;
920 	u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0;
921 	struct msm_dp *dp;
922 	int mode_pclk_khz = mode->clock;
923 
924 	dp = to_dp_bridge(bridge)->dp_display;
925 
926 	if (!dp || !mode_pclk_khz || !dp->connector) {
927 		DRM_ERROR("invalid params\n");
928 		return -EINVAL;
929 	}
930 
931 	if (mode->clock > DP_MAX_PIXEL_CLK_KHZ)
932 		return MODE_CLOCK_HIGH;
933 
934 	dp_display = container_of(dp, struct dp_display_private, dp_display);
935 	link_info = &dp_display->panel->link_info;
936 
937 	if (drm_mode_is_420_only(&dp->connector->display_info, mode) &&
938 	    dp_display->panel->vsc_sdp_supported)
939 		mode_pclk_khz /= 2;
940 
941 	mode_bpp = dp->connector->display_info.bpc * num_components;
942 	if (!mode_bpp)
943 		mode_bpp = default_bpp;
944 
945 	mode_bpp = dp_panel_get_mode_bpp(dp_display->panel,
946 			mode_bpp, mode_pclk_khz);
947 
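	/*
	 * Required rate is pixel clock * bpp in kbit/s; available rate is the
	 * per-lane link rate (kHz symbol rate, 8 data bits per symbol) times
	 * the lane count.
	 */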
948 	mode_rate_khz = mode_pclk_khz * mode_bpp;
949 	supported_rate_khz = link_info->num_lanes * link_info->rate * 8;
950 
951 	if (mode_rate_khz > supported_rate_khz)
952 		return MODE_BAD;
953 
954 	return MODE_OK;
955 }
956 
957 int dp_display_get_modes(struct msm_dp *dp)
958 {
959 	struct dp_display_private *dp_display;
960 
961 	if (!dp) {
962 		DRM_ERROR("invalid params\n");
963 		return 0;
964 	}
965 
966 	dp_display = container_of(dp, struct dp_display_private, dp_display);
967 
968 	return dp_panel_get_modes(dp_display->panel,
969 		dp->connector);
970 }
971 
972 bool dp_display_check_video_test(struct msm_dp *dp)
973 {
974 	struct dp_display_private *dp_display;
975 
976 	dp_display = container_of(dp, struct dp_display_private, dp_display);
977 
978 	return dp_display->panel->video_test;
979 }
980 
981 int dp_display_get_test_bpp(struct msm_dp *dp)
982 {
983 	struct dp_display_private *dp_display;
984 
985 	if (!dp) {
986 		DRM_ERROR("invalid params\n");
987 		return 0;
988 	}
989 
990 	dp_display = container_of(dp, struct dp_display_private, dp_display);
991 
992 	return dp_link_bit_depth_to_bpp(
993 		dp_display->link->test_video.test_bit_depth);
994 }
995 
996 void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp)
997 {
998 	struct dp_display_private *dp_display;
999 
1000 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1001 
1002 	/*
1003 	 * If we are reading registers we need the link clocks to be on.
1004 	 * However, until the DP cable is connected this will not happen, as
1005 	 * we do not know the resolution to power up with. Hence check the
1006 	 * power_on status before dumping DP registers to avoid a crash due
1007 	 * to unclocked access.
1008 	 */
1009 	mutex_lock(&dp_display->event_mutex);
1010 
1011 	if (!dp->power_on) {
1012 		mutex_unlock(&dp_display->event_mutex);
1013 		return;
1014 	}
1015 
1016 	dp_catalog_snapshot(dp_display->catalog, disp_state);
1017 
1018 	mutex_unlock(&dp_display->event_mutex);
1019 }
1020 
1021 void dp_display_set_psr(struct msm_dp *dp_display, bool enter)
1022 {
1023 	struct dp_display_private *dp;
1024 
1025 	if (!dp_display) {
1026 		DRM_ERROR("invalid params\n");
1027 		return;
1028 	}
1029 
1030 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1031 	dp_ctrl_set_psr(dp->ctrl, enter);
1032 }
1033 
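/*
 * Consumer side of the event ring. An event queued with a non-zero delay is
 * re-queued with delay - 1 and the thread switches to a polled wait of
 * EVENT_TIMEOUT, so a delay of N corresponds to roughly N * 100 ms before
 * the event is actually handled.
 */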
1034 static int hpd_event_thread(void *data)
1035 {
1036 	struct dp_display_private *dp_priv;
1037 	unsigned long flag;
1038 	struct dp_event *todo;
1039 	int timeout_mode = 0;
1040 
1041 	dp_priv = (struct dp_display_private *)data;
1042 
1043 	while (1) {
1044 		if (timeout_mode) {
1045 			wait_event_timeout(dp_priv->event_q,
1046 				(dp_priv->event_pndx == dp_priv->event_gndx) ||
1047 					kthread_should_stop(), EVENT_TIMEOUT);
1048 		} else {
1049 			wait_event_interruptible(dp_priv->event_q,
1050 				(dp_priv->event_pndx != dp_priv->event_gndx) ||
1051 					kthread_should_stop());
1052 		}
1053 
1054 		if (kthread_should_stop())
1055 			break;
1056 
1057 		spin_lock_irqsave(&dp_priv->event_lock, flag);
1058 		todo = &dp_priv->event_list[dp_priv->event_gndx];
1059 		if (todo->delay) {
1060 			struct dp_event *todo_next;
1061 
1062 			dp_priv->event_gndx++;
1063 			dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1064 
1065 			/* re-enter the delayed event into the queue */
1066 			todo_next = &dp_priv->event_list[dp_priv->event_pndx++];
1067 			dp_priv->event_pndx %= DP_EVENT_Q_MAX;
1068 			todo_next->event_id = todo->event_id;
1069 			todo_next->data = todo->data;
1070 			todo_next->delay = todo->delay - 1;
1071 
1072 			/* clean up older event */
1073 			todo->event_id = EV_NO_EVENT;
1074 			todo->delay = 0;
1075 
1076 			/* switch to timeout mode */
1077 			timeout_mode = 1;
1078 			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1079 			continue;
1080 		}
1081 
1082 		/* timeout with no events in q */
1083 		if (dp_priv->event_pndx == dp_priv->event_gndx) {
1084 			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1085 			continue;
1086 		}
1087 
1088 		dp_priv->event_gndx++;
1089 		dp_priv->event_gndx %= DP_EVENT_Q_MAX;
1090 		timeout_mode = 0;
1091 		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
1092 
1093 		switch (todo->event_id) {
1094 		case EV_HPD_PLUG_INT:
1095 			dp_hpd_plug_handle(dp_priv, todo->data);
1096 			break;
1097 		case EV_HPD_UNPLUG_INT:
1098 			dp_hpd_unplug_handle(dp_priv, todo->data);
1099 			break;
1100 		case EV_IRQ_HPD_INT:
1101 			dp_irq_hpd_handle(dp_priv, todo->data);
1102 			break;
1103 		case EV_USER_NOTIFICATION:
1104 			dp_display_send_hpd_notification(dp_priv,
1105 						todo->data);
1106 			break;
1107 		default:
1108 			break;
1109 		}
1110 	}
1111 
1112 	return 0;
1113 }
1114 
1115 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv)
1116 {
1117 	/* set event q to empty */
1118 	dp_priv->event_gndx = 0;
1119 	dp_priv->event_pndx = 0;
1120 
1121 	dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler");
1122 	if (IS_ERR(dp_priv->ev_tsk))
1123 		return PTR_ERR(dp_priv->ev_tsk);
1124 
1125 	return 0;
1126 }
1127 
1128 static irqreturn_t dp_display_irq_handler(int irq, void *dev_id)
1129 {
1130 	struct dp_display_private *dp = dev_id;
1131 	irqreturn_t ret = IRQ_NONE;
1132 	u32 hpd_isr_status;
1133 
1134 	if (!dp) {
1135 		DRM_ERROR("invalid data\n");
1136 		return IRQ_NONE;
1137 	}
1138 
1139 	hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog);
1140 
1141 	if (hpd_isr_status & 0x0F) {
1142 		drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n",
1143 			dp->dp_display.connector_type, hpd_isr_status);
1144 		/* hpd related interrupts */
1145 		if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK)
1146 			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1147 
1148 		if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) {
1149 			dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0);
1150 		}
1151 
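		/*
		 * A replug is turned into an unplug followed by a plug delayed
		 * by a few event-loop passes, so the teardown runs first.
		 */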
1152 		if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) {
1153 			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1154 			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3);
1155 		}
1156 
1157 		if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK)
1158 			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1159 
1160 		ret = IRQ_HANDLED;
1161 	}
1162 
1163 	/* DP controller isr */
1164 	ret |= dp_ctrl_isr(dp->ctrl);
1165 
1166 	/* DP aux isr */
1167 	ret |= dp_aux_isr(dp->aux);
1168 
1169 	return ret;
1170 }
1171 
1172 static int dp_display_request_irq(struct dp_display_private *dp)
1173 {
1174 	int rc = 0;
1175 	struct platform_device *pdev = dp->dp_display.pdev;
1176 
1177 	dp->irq = platform_get_irq(pdev, 0);
1178 	if (dp->irq < 0) {
1179 		DRM_ERROR("failed to get irq\n");
1180 		return dp->irq;
1181 	}
1182 
1183 	rc = devm_request_irq(&pdev->dev, dp->irq, dp_display_irq_handler,
1184 			      IRQF_TRIGGER_HIGH|IRQF_NO_AUTOEN,
1185 			      "dp_display_isr", dp);
1186 
1187 	if (rc < 0) {
1188 		DRM_ERROR("failed to request IRQ%u: %d\n",
1189 				dp->irq, rc);
1190 		return rc;
1191 	}
1192 
1193 	return 0;
1194 }
1195 
1196 static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev)
1197 {
1198 	const struct msm_dp_desc *descs = of_device_get_match_data(&pdev->dev);
1199 	struct resource *res;
1200 	int i;
1201 
1202 	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1203 	if (!res)
1204 		return NULL;
1205 
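	/* the descs array is zero-terminated: io_start == 0 ends the walk */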
1206 	for (i = 0; i < descs[i].io_start; i++) {
1207 		if (descs[i].io_start == res->start)
1208 			return &descs[i];
1209 	}
1210 
1211 	dev_err(&pdev->dev, "unknown displayport instance\n");
1212 	return NULL;
1213 }
1214 
1215 static int dp_display_probe_tail(struct device *dev)
1216 {
1217 	struct msm_dp *dp = dev_get_drvdata(dev);
1218 	int ret;
1219 
1220 	/*
1221 	 * External bridges are mandatory for eDP interfaces: one has to
1222 	 * provide at least an eDP panel (which gets wrapped into panel-bridge).
1223 	 *
1224 	 * For DisplayPort interfaces external bridges are optional, so
1225 	 * silently ignore an error if one is not present (-ENODEV).
1226 	 */
1227 	dp->next_bridge = devm_drm_of_get_bridge(&dp->pdev->dev, dp->pdev->dev.of_node, 1, 0);
1228 	if (IS_ERR(dp->next_bridge)) {
1229 		ret = PTR_ERR(dp->next_bridge);
1230 		dp->next_bridge = NULL;
1231 		if (dp->is_edp || ret != -ENODEV)
1232 			return ret;
1233 	}
1234 
1235 	ret = component_add(dev, &dp_display_comp_ops);
1236 	if (ret)
1237 		DRM_ERROR("component add failed, rc=%d\n", ret);
1238 
1239 	return ret;
1240 }
1241 
1242 static int dp_auxbus_done_probe(struct drm_dp_aux *aux)
1243 {
1244 	return dp_display_probe_tail(aux->dev);
1245 }
1246 
1247 static int dp_display_get_connector_type(struct platform_device *pdev,
1248 					 const struct msm_dp_desc *desc)
1249 {
1250 	struct device_node *node = pdev->dev.of_node;
1251 	struct device_node *aux_bus = of_get_child_by_name(node, "aux-bus");
1252 	struct device_node *panel = of_get_child_by_name(aux_bus, "panel");
1253 	int connector_type;
1254 
1255 	if (panel)
1256 		connector_type = DRM_MODE_CONNECTOR_eDP;
1257 	else
1258 		connector_type = DRM_MODE_CONNECTOR_DisplayPort;
1259 
1260 	of_node_put(panel);
1261 	of_node_put(aux_bus);
1262 
1263 	return connector_type;
1264 }
1265 
1266 static int dp_display_probe(struct platform_device *pdev)
1267 {
1268 	int rc = 0;
1269 	struct dp_display_private *dp;
1270 	const struct msm_dp_desc *desc;
1271 
1272 	if (!pdev || !pdev->dev.of_node) {
1273 		DRM_ERROR("pdev not found\n");
1274 		return -ENODEV;
1275 	}
1276 
1277 	dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL);
1278 	if (!dp)
1279 		return -ENOMEM;
1280 
1281 	desc = dp_display_get_desc(pdev);
1282 	if (!desc)
1283 		return -EINVAL;
1284 
1285 	dp->dp_display.pdev = pdev;
1286 	dp->id = desc->id;
1287 	dp->dp_display.connector_type = dp_display_get_connector_type(pdev, desc);
1288 	dp->wide_bus_supported = desc->wide_bus_supported;
1289 	dp->dp_display.is_edp =
1290 		(dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP);
1291 
1292 	rc = dp_init_sub_modules(dp);
1293 	if (rc) {
1294 		DRM_ERROR("init sub module failed\n");
1295 		return -EPROBE_DEFER;
1296 	}
1297 
1298 	/* setup event q */
1299 	mutex_init(&dp->event_mutex);
1300 	init_waitqueue_head(&dp->event_q);
1301 	spin_lock_init(&dp->event_lock);
1302 
1303 	/* Store DP audio handle inside DP display */
1304 	dp->dp_display.dp_audio = dp->audio;
1305 
1306 	init_completion(&dp->audio_comp);
1307 
1308 	platform_set_drvdata(pdev, &dp->dp_display);
1309 
1310 	rc = devm_pm_runtime_enable(&pdev->dev);
1311 	if (rc)
1312 		goto err;
1313 
1314 	rc = dp_display_request_irq(dp);
1315 	if (rc)
1316 		goto err;
1317 
1318 	if (dp->dp_display.is_edp) {
1319 		rc = devm_of_dp_aux_populate_bus(dp->aux, dp_auxbus_done_probe);
1320 		if (rc) {
1321 			DRM_ERROR("eDP auxbus population failed, rc=%d\n", rc);
1322 			goto err;
1323 		}
1324 	} else {
1325 		rc = dp_display_probe_tail(&pdev->dev);
1326 		if (rc)
1327 			goto err;
1328 	}
1329 
1330 	return rc;
1331 
1332 err:
1333 	dp_display_deinit_sub_modules(dp);
1334 	return rc;
1335 }
1336 
1337 static void dp_display_remove(struct platform_device *pdev)
1338 {
1339 	struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev);
1340 
1341 	component_del(&pdev->dev, &dp_display_comp_ops);
1342 	dp_display_deinit_sub_modules(dp);
1343 	platform_set_drvdata(pdev, NULL);
1344 }
1345 
1346 static int dp_pm_runtime_suspend(struct device *dev)
1347 {
1348 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
1349 
1350 	disable_irq(dp->irq);
1351 
1352 	if (dp->dp_display.is_edp) {
1353 		dp_display_host_phy_exit(dp);
1354 		dp_catalog_ctrl_hpd_disable(dp->catalog);
1355 	}
1356 	dp_display_host_deinit(dp);
1357 
1358 	return 0;
1359 }
1360 
1361 static int dp_pm_runtime_resume(struct device *dev)
1362 {
1363 	struct dp_display_private *dp = dev_get_dp_display_private(dev);
1364 
1365 	/*
1366 	 * for eDP, host controller, HPD block and PHY are enabled here
1367 	 * but with HPD irq disabled
1368 	 *
1369 	 * for DP, only host controller is enabled here.
1370 	 * HPD block is enabled at dp_bridge_hpd_enable()
1371 	 * PHY will be enabled at plugin handler later
1372 	 */
1373 	dp_display_host_init(dp);
1374 	if (dp->dp_display.is_edp) {
1375 		dp_catalog_ctrl_hpd_enable(dp->catalog);
1376 		dp_display_host_phy_init(dp);
1377 	}
1378 
1379 	enable_irq(dp->irq);
1380 	return 0;
1381 }
1382 
1383 static const struct dev_pm_ops dp_pm_ops = {
1384 	SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL)
1385 	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
1386 				pm_runtime_force_resume)
1387 };
1388 
1389 static struct platform_driver dp_display_driver = {
1390 	.probe  = dp_display_probe,
1391 	.remove_new = dp_display_remove,
1392 	.driver = {
1393 		.name = "msm-dp-display",
1394 		.of_match_table = dp_dt_match,
1395 		.suppress_bind_attrs = true,
1396 		.pm = &dp_pm_ops,
1397 	},
1398 };
1399 
1400 int __init msm_dp_register(void)
1401 {
1402 	int ret;
1403 
1404 	ret = platform_driver_register(&dp_display_driver);
1405 	if (ret)
1406 		DRM_ERROR("DP display driver register failed\n");
1407 
1408 	return ret;
1409 }
1410 
1411 void __exit msm_dp_unregister(void)
1412 {
1413 	platform_driver_unregister(&dp_display_driver);
1414 }
1415 
1416 bool msm_dp_is_yuv_420_enabled(const struct msm_dp *dp_display,
1417 			       const struct drm_display_mode *mode)
1418 {
1419 	struct dp_display_private *dp;
1420 	const struct drm_display_info *info;
1421 
1422 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1423 	info = &dp_display->connector->display_info;
1424 
1425 	return dp->panel->vsc_sdp_supported && drm_mode_is_420_only(info, mode);
1426 }
1427 
1428 bool msm_dp_needs_periph_flush(const struct msm_dp *dp_display,
1429 			       const struct drm_display_mode *mode)
1430 {
1431 	return msm_dp_is_yuv_420_enabled(dp_display, mode);
1432 }
1433 
1434 bool msm_dp_wide_bus_available(const struct msm_dp *dp_display)
1435 {
1436 	struct dp_display_private *dp;
1437 
1438 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1439 
1440 	if (dp->dp_mode.out_fmt_is_yuv_420)
1441 		return false;
1442 
1443 	return dp->wide_bus_supported;
1444 }
1445 
1446 void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp)
1447 {
1448 	struct dp_display_private *dp;
1449 	struct device *dev;
1450 	int rc;
1451 
1452 	dp = container_of(dp_display, struct dp_display_private, dp_display);
1453 	dev = &dp->dp_display.pdev->dev;
1454 
1455 	rc = dp_debug_init(dev, dp->panel, dp->link, dp->dp_display.connector, root, is_edp);
1456 	if (rc)
1457 		DRM_ERROR("failed to initialize debug, rc = %d\n", rc);
1458 }
1459 
1460 int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev,
1461 			struct drm_encoder *encoder, bool yuv_supported)
1462 {
1463 	struct dp_display_private *dp_priv;
1464 	int ret;
1465 
1466 	dp_display->drm_dev = dev;
1467 
1468 	dp_priv = container_of(dp_display, struct dp_display_private, dp_display);
1469 
1470 	ret = dp_bridge_init(dp_display, dev, encoder);
1471 	if (ret) {
1472 		DRM_DEV_ERROR(dev->dev,
1473 			"failed to create dp bridge: %d\n", ret);
1474 		return ret;
1475 	}
1476 
1477 	dp_display->connector = dp_drm_connector_init(dp_display, encoder, yuv_supported);
1478 	if (IS_ERR(dp_display->connector)) {
1479 		ret = PTR_ERR(dp_display->connector);
1480 		DRM_DEV_ERROR(dev->dev,
1481 			"failed to create dp connector: %d\n", ret);
1482 		dp_display->connector = NULL;
1483 		return ret;
1484 	}
1485 
1486 	dp_priv->panel->connector = dp_display->connector;
1487 
1488 	return 0;
1489 }
1490 
1491 void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge,
1492 			     struct drm_bridge_state *old_bridge_state)
1493 {
1494 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1495 	struct msm_dp *dp = dp_bridge->dp_display;
1496 	int rc = 0;
1497 	struct dp_display_private *dp_display;
1498 	u32 state;
1499 	bool force_link_train = false;
1500 
1501 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1502 	if (!dp_display->dp_mode.drm_mode.clock) {
1503 		DRM_ERROR("invalid params\n");
1504 		return;
1505 	}
1506 
1507 	if (dp->is_edp)
1508 		dp_hpd_plug_handle(dp_display, 0);
1509 
1510 	mutex_lock(&dp_display->event_mutex);
1511 	if (pm_runtime_resume_and_get(&dp->pdev->dev)) {
1512 		DRM_ERROR("failed to pm_runtime_resume\n");
1513 		mutex_unlock(&dp_display->event_mutex);
1514 		return;
1515 	}
1516 
1517 	state = dp_display->hpd_state;
1518 	if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) {
1519 		mutex_unlock(&dp_display->event_mutex);
1520 		return;
1521 	}
1522 
1523 	rc = dp_display_set_mode(dp, &dp_display->dp_mode);
1524 	if (rc) {
1525 		DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc);
1526 		mutex_unlock(&dp_display->event_mutex);
1527 		return;
1528 	}
1529 
1530 	state =  dp_display->hpd_state;
1531 
1532 	if (state == ST_DISPLAY_OFF) {
1533 		dp_display_host_phy_init(dp_display);
1534 		force_link_train = true;
1535 	}
1536 
1537 	dp_display_enable(dp_display, force_link_train);
1538 
1539 	rc = dp_display_post_enable(dp);
1540 	if (rc) {
1541 		DRM_ERROR("DP display post enable failed, rc=%d\n", rc);
1542 		dp_display_disable(dp_display);
1543 	}
1544 
1545 	/* completed connection */
1546 	dp_display->hpd_state = ST_CONNECTED;
1547 
1548 	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1549 	mutex_unlock(&dp_display->event_mutex);
1550 }
1551 
1552 void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge,
1553 			      struct drm_bridge_state *old_bridge_state)
1554 {
1555 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1556 	struct msm_dp *dp = dp_bridge->dp_display;
1557 	struct dp_display_private *dp_display;
1558 
1559 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1560 
1561 	dp_ctrl_push_idle(dp_display->ctrl);
1562 }
1563 
1564 void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge,
1565 				   struct drm_bridge_state *old_bridge_state)
1566 {
1567 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1568 	struct msm_dp *dp = dp_bridge->dp_display;
1569 	u32 state;
1570 	struct dp_display_private *dp_display;
1571 
1572 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1573 
1574 	if (dp->is_edp)
1575 		dp_hpd_unplug_handle(dp_display, 0);
1576 
1577 	mutex_lock(&dp_display->event_mutex);
1578 
1579 	state = dp_display->hpd_state;
1580 	if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED)
1581 		drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n",
1582 			   dp->connector_type, state);
1583 
1584 	dp_display_disable(dp_display);
1585 
1586 	state =  dp_display->hpd_state;
1587 	if (state == ST_DISCONNECT_PENDING) {
1588 		/* completed disconnection */
1589 		dp_display->hpd_state = ST_DISCONNECTED;
1590 	} else {
1591 		dp_display->hpd_state = ST_DISPLAY_OFF;
1592 	}
1593 
1594 	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
1595 
1596 	pm_runtime_put_sync(&dp->pdev->dev);
1597 	mutex_unlock(&dp_display->event_mutex);
1598 }
1599 
1600 void dp_bridge_mode_set(struct drm_bridge *drm_bridge,
1601 			const struct drm_display_mode *mode,
1602 			const struct drm_display_mode *adjusted_mode)
1603 {
1604 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
1605 	struct msm_dp *dp = dp_bridge->dp_display;
1606 	struct dp_display_private *dp_display;
1607 	struct dp_panel *dp_panel;
1608 
1609 	dp_display = container_of(dp, struct dp_display_private, dp_display);
1610 	dp_panel = dp_display->panel;
1611 
1612 	memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode));
1613 
1614 	if (dp_display_check_video_test(dp))
1615 		dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp);
1616 	else /* Default num_components per pixel = 3 */
1617 		dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3;
1618 
1619 	if (!dp_display->dp_mode.bpp)
1620 		dp_display->dp_mode.bpp = 24; /* Default bpp */
1621 
1622 	drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode);
1623 
1624 	dp_display->dp_mode.v_active_low =
1625 		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC);
1626 
1627 	dp_display->dp_mode.h_active_low =
1628 		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC);
1629 
1630 	dp_display->dp_mode.out_fmt_is_yuv_420 =
1631 		drm_mode_is_420_only(&dp->connector->display_info, adjusted_mode) &&
1632 		dp_panel->vsc_sdp_supported;
1633 
1634 	/* propagate wide_bus_supported to the other layers */
1635 	dp_display->ctrl->wide_bus_en =
1636 		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1637 	dp_display->catalog->wide_bus_en =
1638 		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
1639 }
1640 
1641 void dp_bridge_hpd_enable(struct drm_bridge *bridge)
1642 {
1643 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1644 	struct msm_dp *dp_display = dp_bridge->dp_display;
1645 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1646 
1647 	/*
1648 	 * This is for the external DP case with the HPD irq enabled:
1649 	 * step-1: dp_pm_runtime_resume() enables the DP host only
1650 	 * step-2: enable the HPD block and the HPD irq here
1651 	 * step-3: wait for the plugin irq while the PHY is not initialized
1652 	 * step-4: the DP PHY is initialized in the plugin handler before link training
1653 	 *
1654 	 */
1655 	mutex_lock(&dp->event_mutex);
1656 	if (pm_runtime_resume_and_get(&dp_display->pdev->dev)) {
1657 		DRM_ERROR("failed to resume power\n");
1658 		mutex_unlock(&dp->event_mutex);
1659 		return;
1660 	}
1661 
1662 	dp_catalog_ctrl_hpd_enable(dp->catalog);
1663 
1664 	/* enable HPD interrupts */
1665 	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, true);
1666 
1667 	dp_display->internal_hpd = true;
1668 	mutex_unlock(&dp->event_mutex);
1669 }
1670 
1671 void dp_bridge_hpd_disable(struct drm_bridge *bridge)
1672 {
1673 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1674 	struct msm_dp *dp_display = dp_bridge->dp_display;
1675 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1676 
1677 	mutex_lock(&dp->event_mutex);
1678 	/* disable HDP interrupts */
1679 	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false);
1680 	dp_catalog_ctrl_hpd_disable(dp->catalog);
1681 
1682 	dp_display->internal_hpd = false;
1683 
1684 	pm_runtime_put_sync(&dp_display->pdev->dev);
1685 	mutex_unlock(&dp->event_mutex);
1686 }
1687 
1688 void dp_bridge_hpd_notify(struct drm_bridge *bridge,
1689 			  enum drm_connector_status status)
1690 {
1691 	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
1692 	struct msm_dp *dp_display = dp_bridge->dp_display;
1693 	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);
1694 
1695 	/* with internal HPD the DP core handles interrupts directly, so ignore bridge notifications */
1696 	if (dp_display->internal_hpd)
1697 		return;
1698 
1699 	if (!dp_display->link_ready && status == connector_status_connected)
1700 		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
1701 	else if (dp_display->link_ready && status == connector_status_disconnected)
1702 		dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
1703 }
1704