// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved.
 */

#include <linux/module.h>
#include <linux/slab.h>
#include <linux/uaccess.h>
#include <linux/debugfs.h>
#include <linux/component.h>
#include <linux/of_irq.h>
#include <linux/phy/phy.h>
#include <linux/delay.h>
#include <drm/display/drm_dp_aux_bus.h>
#include <drm/drm_edid.h>

#include "msm_drv.h"
#include "msm_kms.h"
#include "dp_ctrl.h"
#include "dp_catalog.h"
#include "dp_aux.h"
#include "dp_reg.h"
#include "dp_link.h"
#include "dp_panel.h"
#include "dp_display.h"
#include "dp_drm.h"
#include "dp_audio.h"
#include "dp_debug.h"

static bool psr_enabled = false;
module_param(psr_enabled, bool, 0);
MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays");

#define HPD_STRING_SIZE 30

enum {
	ISR_DISCONNECTED,
	ISR_CONNECT_PENDING,
	ISR_CONNECTED,
	ISR_HPD_REPLUG_COUNT,
	ISR_IRQ_HPD_PULSE_COUNT,
	ISR_HPD_LO_GLITCH_COUNT,
};

/* event thread connection state */
enum {
	ST_DISCONNECTED,
	ST_MAINLINK_READY,
	ST_CONNECTED,
	ST_DISCONNECT_PENDING,
	ST_DISPLAY_OFF,
};

enum {
	EV_NO_EVENT,
	/* hpd events */
	EV_HPD_PLUG_INT,
	EV_IRQ_HPD_INT,
	EV_HPD_UNPLUG_INT,
	EV_USER_NOTIFICATION,
};

#define EVENT_TIMEOUT	(HZ/10)	/* 100ms */
#define DP_EVENT_Q_MAX	8

#define DP_TIMEOUT_NONE		0

#define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2)

struct dp_event {
	u32 event_id;
	u32 data;
	u32 delay;
};

struct dp_display_private {
	char *name;
	int irq;

	unsigned int id;

	/* state variables */
	bool core_initialized;
	bool phy_initialized;
	bool hpd_irq_on;
	bool audio_supported;

	struct drm_device *drm_dev;
	struct dentry *root;

	struct dp_catalog *catalog;
	struct drm_dp_aux *aux;
	struct dp_link *link;
	struct dp_panel *panel;
	struct dp_ctrl *ctrl;
	struct dp_debug *debug;

	struct dp_display_mode dp_mode;
	struct msm_dp dp_display;

	/* wait for audio signaling */
	struct completion audio_comp;

	/* event related fields, accessed only by the event thread */
	struct mutex event_mutex;
	wait_queue_head_t event_q;
	u32 hpd_state;
	u32 event_pndx;
	u32 event_gndx;
	struct task_struct *ev_tsk;
	struct dp_event event_list[DP_EVENT_Q_MAX];
	spinlock_t event_lock;

	bool wide_bus_supported;

	struct dp_audio *audio;
};

struct msm_dp_desc {
	phys_addr_t io_start;
	unsigned int id;
	unsigned int connector_type;
	bool wide_bus_supported;
};

static const struct msm_dp_desc sc7180_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{}
};

static const struct msm_dp_desc sc7280_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{}
};

static const struct msm_dp_desc sc8180x_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2,
	  .connector_type = DRM_MODE_CONNECTOR_eDP },
	{}
};

static const struct msm_dp_desc sc8280xp_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_supported = true },
	{}
};

static const struct msm_dp_desc sc8280xp_edp_descs[] = {
	{ .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{ .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{ .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{ .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_supported = true },
	{}
};

static const struct msm_dp_desc sm8350_dp_descs[] = {
	{ .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{}
};

static const struct msm_dp_desc sm8650_dp_descs[] = {
	{ .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort },
	{}
};

static const struct of_device_id dp_dt_match[] = {
	{ .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_descs },
	{ .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_descs },
	{ .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_descs },
	{ .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_descs },
	{ .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_descs },
	{ .compatible = "qcom,sc8280xp-dp", .data = &sc8280xp_dp_descs },
	{ .compatible = "qcom,sc8280xp-edp", .data = &sc8280xp_edp_descs },
	{ .compatible = "qcom,sdm845-dp", .data = &sc7180_dp_descs },
	{ .compatible = "qcom,sm8350-dp", .data = &sm8350_dp_descs },
	{ .compatible = "qcom,sm8650-dp", .data = &sm8650_dp_descs },
	{}
};

static struct dp_display_private *dev_get_dp_display_private(struct device *dev)
{
	struct msm_dp *dp = dev_get_drvdata(dev);

	return container_of(dp, struct dp_display_private, dp_display);
}

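/*
 * HPD events are queued into a small ring buffer and consumed by the
 * hpd_event_thread.  event_pndx is the producer index (advanced under
 * event_lock by dp_add_event()), event_gndx is the consumer index
 * (advanced by the event thread).  The queue is treated as full when
 * advancing the producer index would collide with the consumer index,
 * so at most DP_EVENT_Q_MAX - 1 events can be pending at once.
 */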
static int dp_add_event(struct dp_display_private *dp_priv, u32 event,
			u32 data, u32 delay)
{
	unsigned long flag;
	struct dp_event *todo;
	int pndx;

	spin_lock_irqsave(&dp_priv->event_lock, flag);
	pndx = dp_priv->event_pndx + 1;
	pndx %= DP_EVENT_Q_MAX;
	if (pndx == dp_priv->event_gndx) {
		pr_err("event_q is full: pndx=%d gndx=%d\n",
			dp_priv->event_pndx, dp_priv->event_gndx);
		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
		return -EPERM;
	}
	todo = &dp_priv->event_list[dp_priv->event_pndx++];
	dp_priv->event_pndx %= DP_EVENT_Q_MAX;
	todo->event_id = event;
	todo->data = data;
	todo->delay = delay;
	wake_up(&dp_priv->event_q);
	spin_unlock_irqrestore(&dp_priv->event_lock, flag);

	return 0;
}

static int dp_del_event(struct dp_display_private *dp_priv, u32 event)
{
	unsigned long flag;
	struct dp_event *todo;
	u32 gndx;

	spin_lock_irqsave(&dp_priv->event_lock, flag);
	if (dp_priv->event_pndx == dp_priv->event_gndx) {
		spin_unlock_irqrestore(&dp_priv->event_lock, flag);
		return -ENOENT;
	}

	gndx = dp_priv->event_gndx;
	while (dp_priv->event_pndx != gndx) {
		todo = &dp_priv->event_list[gndx];
		if (todo->event_id == event) {
			todo->event_id = EV_NO_EVENT;	/* deleted */
			todo->delay = 0;
		}
		gndx++;
		gndx %= DP_EVENT_Q_MAX;
	}
	spin_unlock_irqrestore(&dp_priv->event_lock, flag);

	return 0;
}

void dp_display_signal_audio_start(struct msm_dp *dp_display)
{
	struct dp_display_private *dp;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	reinit_completion(&dp->audio_comp);
}

void dp_display_signal_audio_complete(struct msm_dp *dp_display)
{
	struct dp_display_private *dp;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	complete_all(&dp->audio_comp);
}

static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv);

static int dp_display_bind(struct device *dev, struct device *master,
			   void *data)
{
	int rc = 0;
	struct dp_display_private *dp = dev_get_dp_display_private(dev);
	struct msm_drm_private *priv = dev_get_drvdata(master);
	struct drm_device *drm = priv->dev;

	dp->dp_display.drm_dev = drm;
	priv->dp[dp->id] = &dp->dp_display;

	dp->drm_dev = drm;
	dp->aux->drm_dev = drm;
	rc = dp_aux_register(dp->aux);
	if (rc) {
		DRM_ERROR("DRM DP AUX register failed\n");
		goto end;
	}

	rc = dp_register_audio_driver(dev, dp->audio);
	if (rc) {
		DRM_ERROR("DP audio registration failed\n");
		goto end;
	}

	rc = dp_hpd_event_thread_start(dp);
	if (rc) {
		DRM_ERROR("Event thread create failed\n");
		goto end;
	}

	return 0;
end:
	return rc;
}

static void dp_display_unbind(struct device *dev, struct device *master,
			      void *data)
{
	struct dp_display_private *dp = dev_get_dp_display_private(dev);
	struct msm_drm_private *priv = dev_get_drvdata(master);

	kthread_stop(dp->ev_tsk);

	of_dp_aux_depopulate_bus(dp->aux);

	dp_unregister_audio_driver(dev, dp->audio);
	dp_aux_unregister(dp->aux);
	dp->drm_dev = NULL;
	dp->aux->drm_dev = NULL;
	priv->dp[dp->id] = NULL;
}

static const struct component_ops dp_display_comp_ops = {
	.bind = dp_display_bind,
	.unbind = dp_display_unbind,
};

static void dp_display_send_hpd_event(struct msm_dp *dp_display)
{
	struct dp_display_private *dp;
	struct drm_connector *connector;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	connector = dp->dp_display.connector;
	drm_helper_hpd_irq_event(connector->dev);
}

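/*
 * Report an HPD change to userspace.  Called from the event thread via
 * EV_USER_NOTIFICATION; it updates link_ready, clears the video test
 * flag and updates the subconnector property on disconnect (for
 * non-eDP), then kicks the DRM HPD helper so a hotplug uevent is sent.
 */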
static int dp_display_send_hpd_notification(struct dp_display_private *dp,
					    bool hpd)
{
	if ((hpd && dp->dp_display.link_ready) ||
			(!hpd && !dp->dp_display.link_ready)) {
		drm_dbg_dp(dp->drm_dev, "HPD already %s\n",
				(hpd ? "on" : "off"));
		return 0;
	}

	/* reset video pattern flag on disconnect */
	if (!hpd) {
		dp->panel->video_test = false;
		if (!dp->dp_display.is_edp)
			drm_dp_set_subconnector_property(dp->dp_display.connector,
							 connector_status_disconnected,
							 dp->panel->dpcd,
							 dp->panel->downstream_ports);
	}

	dp->dp_display.link_ready = hpd;

	drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n",
			dp->dp_display.connector_type, hpd);
	dp_display_send_hpd_event(&dp->dp_display);

	return 0;
}

static int dp_display_process_hpd_high(struct dp_display_private *dp)
{
	int rc = 0;
	struct edid *edid;

	rc = dp_panel_read_sink_caps(dp->panel, dp->dp_display.connector);
	if (rc)
		goto end;

	dp_link_process_request(dp->link);

	if (!dp->dp_display.is_edp)
		drm_dp_set_subconnector_property(dp->dp_display.connector,
						 connector_status_connected,
						 dp->panel->dpcd,
						 dp->panel->downstream_ports);

	edid = dp->panel->edid;

	dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled;

	dp->audio_supported = drm_detect_monitor_audio(edid);
	dp_panel_handle_sink_request(dp->panel);

	/*
	 * set sink to normal operation mode -- D0
	 * before dpcd read
	 */
	dp_link_psm_config(dp->link, &dp->panel->link_info, false);

	dp_link_reset_phy_params_vx_px(dp->link);
	rc = dp_ctrl_on_link(dp->ctrl);
	if (rc) {
		DRM_ERROR("failed to complete DP link training\n");
		goto end;
	}

	dp_add_event(dp, EV_USER_NOTIFICATION, true, 0);

end:
	return rc;
}

static void dp_display_host_phy_init(struct dp_display_private *dp)
{
	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
		dp->dp_display.connector_type, dp->core_initialized,
		dp->phy_initialized);

	if (!dp->phy_initialized) {
		dp_ctrl_phy_init(dp->ctrl);
		dp->phy_initialized = true;
	}
}

static void dp_display_host_phy_exit(struct dp_display_private *dp)
{
	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
		dp->dp_display.connector_type, dp->core_initialized,
		dp->phy_initialized);

	if (dp->phy_initialized) {
		dp_ctrl_phy_exit(dp->ctrl);
		dp->phy_initialized = false;
	}
}

static void dp_display_host_init(struct dp_display_private *dp)
{
	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
		dp->dp_display.connector_type, dp->core_initialized,
		dp->phy_initialized);

	dp_ctrl_core_clk_enable(dp->ctrl);
	dp_ctrl_reset_irq_ctrl(dp->ctrl, true);
	dp_aux_init(dp->aux);
	dp->core_initialized = true;
}

static void dp_display_host_deinit(struct dp_display_private *dp)
{
	drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n",
		dp->dp_display.connector_type, dp->core_initialized,
		dp->phy_initialized);

	dp_ctrl_reset_irq_ctrl(dp->ctrl, false);
	dp_aux_deinit(dp->aux);
	dp_ctrl_core_clk_disable(dp->ctrl);
	dp->core_initialized = false;
}

static int dp_display_usbpd_configure_cb(struct device *dev)
{
	struct dp_display_private *dp = dev_get_dp_display_private(dev);

	dp_display_host_phy_init(dp);

	return dp_display_process_hpd_high(dp);
}

static int dp_display_notify_disconnect(struct device *dev)
{
	struct dp_display_private *dp = dev_get_dp_display_private(dev);

	dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);

	return 0;
}

static void dp_display_handle_video_request(struct dp_display_private *dp)
{
	if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) {
		dp->panel->video_test = true;
		dp_link_send_test_response(dp->link);
	}
}

static int dp_display_handle_port_status_changed(struct dp_display_private *dp)
{
	int rc = 0;

	if (drm_dp_is_branch(dp->panel->dpcd) && dp->link->sink_count == 0) {
		drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n");
		if (dp->hpd_state != ST_DISCONNECTED) {
			dp->hpd_state = ST_DISCONNECT_PENDING;
			dp_add_event(dp, EV_USER_NOTIFICATION, false, 0);
		}
	} else {
		if (dp->hpd_state == ST_DISCONNECTED) {
			dp->hpd_state = ST_MAINLINK_READY;
			rc = dp_display_process_hpd_high(dp);
			if (rc)
				dp->hpd_state = ST_DISCONNECTED;
		}
	}

	return rc;
}

static int dp_display_handle_irq_hpd(struct dp_display_private *dp)
{
	u32 sink_request = dp->link->sink_request;

	drm_dbg_dp(dp->drm_dev, "%d\n", sink_request);
	if (dp->hpd_state == ST_DISCONNECTED) {
		if (sink_request & DP_LINK_STATUS_UPDATED) {
			drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n",
					sink_request);
			DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n");
			return -EINVAL;
		}
	}

	dp_ctrl_handle_sink_request(dp->ctrl);

	if (sink_request & DP_TEST_LINK_VIDEO_PATTERN)
		dp_display_handle_video_request(dp);

	return 0;
}

static int dp_display_usbpd_attention_cb(struct device *dev)
{
	int rc = 0;
	u32 sink_request;
	struct dp_display_private *dp = dev_get_dp_display_private(dev);

	/* check for any test request issued by sink */
	rc = dp_link_process_request(dp->link);
	if (!rc) {
		sink_request = dp->link->sink_request;
		drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n",
				dp->hpd_state, sink_request);
		if (sink_request & DS_PORT_STATUS_CHANGED)
			rc = dp_display_handle_port_status_changed(dp);
		else
			rc = dp_display_handle_irq_hpd(dp);
	}

	return rc;
}

static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data)
{
	u32 state;
	int ret;
	struct platform_device *pdev = dp->dp_display.pdev;

	mutex_lock(&dp->event_mutex);

	state = dp->hpd_state;
	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
			dp->dp_display.connector_type, state);

	if (state == ST_DISPLAY_OFF) {
		mutex_unlock(&dp->event_mutex);
		return 0;
	}

	if (state == ST_MAINLINK_READY || state == ST_CONNECTED) {
		mutex_unlock(&dp->event_mutex);
		return 0;
	}

	if (state == ST_DISCONNECT_PENDING) {
		/* wait until ST_DISCONNECTED */
		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */
		mutex_unlock(&dp->event_mutex);
		return 0;
	}

	ret = pm_runtime_resume_and_get(&pdev->dev);
	if (ret) {
		DRM_ERROR("failed to pm_runtime_resume\n");
		mutex_unlock(&dp->event_mutex);
		return ret;
	}

	ret = dp_display_usbpd_configure_cb(&pdev->dev);
	if (ret) {	/* link train failed */
		dp->hpd_state = ST_DISCONNECTED;
	} else {
		dp->hpd_state = ST_MAINLINK_READY;
	}

	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
			dp->dp_display.connector_type, state);
	mutex_unlock(&dp->event_mutex);

	/* uevent will complete connection part */
	return 0;
}

static void dp_display_handle_plugged_change(struct msm_dp *dp_display,
		bool plugged)
{
	struct dp_display_private *dp;

	dp = container_of(dp_display,
			struct dp_display_private, dp_display);

	/* notify audio subsystem only if sink supports audio */
	if (dp_display->plugged_cb && dp_display->codec_dev &&
			dp->audio_supported)
		dp_display->plugged_cb(dp_display->codec_dev, plugged);
}

static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data)
{
	u32 state;
	struct platform_device *pdev = dp->dp_display.pdev;

	mutex_lock(&dp->event_mutex);

	state = dp->hpd_state;

	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
			dp->dp_display.connector_type, state);

	/* unplugged, no more irq_hpd handle */
	dp_del_event(dp, EV_IRQ_HPD_INT);

	if (state == ST_DISCONNECTED) {
		/* triggered by irq_hpd with sink_count = 0 */
		if (dp->link->sink_count == 0) {
			dp_display_host_phy_exit(dp);
		}
		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
		mutex_unlock(&dp->event_mutex);
		return 0;
	} else if (state == ST_DISCONNECT_PENDING) {
		mutex_unlock(&dp->event_mutex);
		return 0;
	} else if (state == ST_MAINLINK_READY) {
		dp_ctrl_off_link(dp->ctrl);
		dp_display_host_phy_exit(dp);
		dp->hpd_state = ST_DISCONNECTED;
		dp_display_notify_disconnect(&dp->dp_display.pdev->dev);
		mutex_unlock(&dp->event_mutex);
		return 0;
	}

	/*
	 * We don't need separate work for disconnect as
	 * connect/attention interrupts are disabled
	 */
	dp_display_notify_disconnect(&dp->dp_display.pdev->dev);

	if (state == ST_DISPLAY_OFF) {
		dp->hpd_state = ST_DISCONNECTED;
	} else {
		dp->hpd_state = ST_DISCONNECT_PENDING;
	}

	/* signal the disconnect event early to ensure proper teardown */
	dp_display_handle_plugged_change(&dp->dp_display, false);

	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
			dp->dp_display.connector_type, state);

	/* uevent will complete disconnection part */
	pm_runtime_put_sync(&pdev->dev);
	mutex_unlock(&dp->event_mutex);
	return 0;
}

static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data)
{
	u32 state;

	mutex_lock(&dp->event_mutex);

	/* irq_hpd can happen at either connected or disconnected state */
	state = dp->hpd_state;
	drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n",
			dp->dp_display.connector_type, state);

	if (state == ST_DISPLAY_OFF) {
		mutex_unlock(&dp->event_mutex);
		return 0;
	}

	if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) {
		/* wait until ST_CONNECTED */
		dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */
		mutex_unlock(&dp->event_mutex);
		return 0;
	}

	dp_display_usbpd_attention_cb(&dp->dp_display.pdev->dev);

	drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n",
			dp->dp_display.connector_type, state);

	mutex_unlock(&dp->event_mutex);

	return 0;
}

static void dp_display_deinit_sub_modules(struct dp_display_private *dp)
{
	dp_audio_put(dp->audio);
	dp_panel_put(dp->panel);
	dp_aux_put(dp->aux);
}

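/*
 * Create the DP sub-modules (catalog, AUX, link, panel, controller and
 * audio) in dependency order.  Errors unwind through the goto labels
 * below; dp_display_deinit_sub_modules() releases the audio, panel and
 * AUX handles again on remove.
 */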
static int dp_init_sub_modules(struct dp_display_private *dp)
{
	int rc = 0;
	struct device *dev = &dp->dp_display.pdev->dev;
	struct dp_panel_in panel_in = {
		.dev = dev,
	};
	struct phy *phy;

	phy = devm_phy_get(dev, "dp");
	if (IS_ERR(phy))
		return PTR_ERR(phy);

	dp->catalog = dp_catalog_get(dev);
	if (IS_ERR(dp->catalog)) {
		rc = PTR_ERR(dp->catalog);
		DRM_ERROR("failed to initialize catalog, rc = %d\n", rc);
		dp->catalog = NULL;
		goto error;
	}

	dp->aux = dp_aux_get(dev, dp->catalog,
			     phy,
			     dp->dp_display.is_edp);
	if (IS_ERR(dp->aux)) {
		rc = PTR_ERR(dp->aux);
		DRM_ERROR("failed to initialize aux, rc = %d\n", rc);
		dp->aux = NULL;
		goto error;
	}

	dp->link = dp_link_get(dev, dp->aux);
	if (IS_ERR(dp->link)) {
		rc = PTR_ERR(dp->link);
		DRM_ERROR("failed to initialize link, rc = %d\n", rc);
		dp->link = NULL;
		goto error_link;
	}

	panel_in.aux = dp->aux;
	panel_in.catalog = dp->catalog;
	panel_in.link = dp->link;

	dp->panel = dp_panel_get(&panel_in);
	if (IS_ERR(dp->panel)) {
		rc = PTR_ERR(dp->panel);
		DRM_ERROR("failed to initialize panel, rc = %d\n", rc);
		dp->panel = NULL;
		goto error_link;
	}

	dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux,
			       dp->catalog,
			       phy);
	if (IS_ERR(dp->ctrl)) {
		rc = PTR_ERR(dp->ctrl);
		DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc);
		dp->ctrl = NULL;
		goto error_ctrl;
	}

	dp->audio = dp_audio_get(dp->dp_display.pdev, dp->panel, dp->catalog);
	if (IS_ERR(dp->audio)) {
		rc = PTR_ERR(dp->audio);
		pr_err("failed to initialize audio, rc = %d\n", rc);
		dp->audio = NULL;
		goto error_ctrl;
	}

	return rc;

error_ctrl:
	dp_panel_put(dp->panel);
error_link:
	dp_aux_put(dp->aux);
error:
	return rc;
}

static int dp_display_set_mode(struct msm_dp *dp_display,
			       struct dp_display_mode *mode)
{
	struct dp_display_private *dp;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode);
	dp->panel->dp_mode.bpp = mode->bpp;
	dp->panel->dp_mode.capabilities = mode->capabilities;
	dp->panel->dp_mode.out_fmt_is_yuv_420 = mode->out_fmt_is_yuv_420;
	dp_panel_init_panel_info(dp->panel);
	return 0;
}

static int dp_display_enable(struct dp_display_private *dp, bool force_link_train)
{
	int rc = 0;
	struct msm_dp *dp_display = &dp->dp_display;

	drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count);
	if (dp_display->power_on) {
		drm_dbg_dp(dp->drm_dev, "Link already setup, return\n");
		return 0;
	}

	rc = dp_ctrl_on_stream(dp->ctrl, force_link_train);
	if (!rc)
		dp_display->power_on = true;

	return rc;
}

static int dp_display_post_enable(struct msm_dp *dp_display)
{
	struct dp_display_private *dp;
	u32 rate;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	rate = dp->link->link_params.rate;

	if (dp->audio_supported) {
		dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate);
		dp->audio->lane_count = dp->link->link_params.num_lanes;
	}

	/* signal the connect event late to synchronize video and display */
	dp_display_handle_plugged_change(dp_display, true);

	if (dp_display->psr_supported)
		dp_ctrl_config_psr(dp->ctrl);

	return 0;
}

static int dp_display_disable(struct dp_display_private *dp)
{
	struct msm_dp *dp_display = &dp->dp_display;

	if (!dp_display->power_on)
		return 0;

	/* wait only if audio was enabled */
	if (dp_display->audio_enabled) {
		/* signal the disconnect event */
		dp_display_handle_plugged_change(dp_display, false);
		if (!wait_for_completion_timeout(&dp->audio_comp,
				HZ * 5))
			DRM_ERROR("audio comp timeout\n");
	}

	dp_display->audio_enabled = false;

	if (dp->link->sink_count == 0) {
		/*
		 * irq_hpd with sink_count = 0
		 * hdmi unplugged out of dongle
		 */
		dp_ctrl_off_link_stream(dp->ctrl);
	} else {
		/*
		 * unplugged interrupt
		 * dongle unplugged out of DUT
		 */
		dp_ctrl_off(dp->ctrl);
		dp_display_host_phy_exit(dp);
	}

	dp_display->power_on = false;

	drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count);
	return 0;
}

int dp_display_set_plugged_cb(struct msm_dp *dp_display,
		hdmi_codec_plugged_cb fn, struct device *codec_dev)
{
	bool plugged;

	dp_display->plugged_cb = fn;
	dp_display->codec_dev = codec_dev;
	plugged = dp_display->link_ready;
	dp_display_handle_plugged_change(dp_display, plugged);

	return 0;
}

/**
 * dp_bridge_mode_valid - callback to determine if specified mode is valid
 * @bridge: Pointer to drm bridge structure
 * @info: display info
 * @mode: Pointer to drm mode structure
 * Returns: Validity status for specified mode
 */
enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge,
					  const struct drm_display_info *info,
					  const struct drm_display_mode *mode)
{
	const u32 num_components = 3, default_bpp = 24;
	struct dp_display_private *dp_display;
	struct dp_link_info *link_info;
	u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0;
	struct msm_dp *dp;
	int mode_pclk_khz = mode->clock;

	dp = to_dp_bridge(bridge)->dp_display;

	if (!dp || !mode_pclk_khz || !dp->connector) {
		DRM_ERROR("invalid params\n");
		return -EINVAL;
	}

	if (mode->clock > DP_MAX_PIXEL_CLK_KHZ)
		return MODE_CLOCK_HIGH;

	dp_display = container_of(dp, struct dp_display_private, dp_display);
	link_info = &dp_display->panel->link_info;

	if (drm_mode_is_420_only(&dp->connector->display_info, mode) &&
	    dp_display->panel->vsc_sdp_supported)
		mode_pclk_khz /= 2;

	mode_bpp = dp->connector->display_info.bpc * num_components;
	if (!mode_bpp)
		mode_bpp = default_bpp;

	mode_bpp = dp_panel_get_mode_bpp(dp_display->panel,
			mode_bpp, mode_pclk_khz);

	mode_rate_khz = mode_pclk_khz * mode_bpp;
	supported_rate_khz = link_info->num_lanes * link_info->rate * 8;

	if (mode_rate_khz > supported_rate_khz)
		return MODE_BAD;

	return MODE_OK;
}

int dp_display_get_modes(struct msm_dp *dp)
{
	struct dp_display_private *dp_display;

	if (!dp) {
		DRM_ERROR("invalid params\n");
		return 0;
	}

	dp_display = container_of(dp, struct dp_display_private, dp_display);

	return dp_panel_get_modes(dp_display->panel,
		dp->connector);
}

bool dp_display_check_video_test(struct msm_dp *dp)
{
	struct dp_display_private *dp_display;

	dp_display = container_of(dp, struct dp_display_private, dp_display);

	return dp_display->panel->video_test;
}

int dp_display_get_test_bpp(struct msm_dp *dp)
{
	struct dp_display_private *dp_display;

	if (!dp) {
		DRM_ERROR("invalid params\n");
		return 0;
	}

	dp_display = container_of(dp, struct dp_display_private, dp_display);

	return dp_link_bit_depth_to_bpp(
			dp_display->link->test_video.test_bit_depth);
}

void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp)
{
	struct dp_display_private *dp_display;

	dp_display = container_of(dp, struct dp_display_private, dp_display);

	/*
	 * If we are reading registers we need the link clocks to be on.
	 * However, until a DP cable is connected this will not happen, as
	 * we do not know the resolution to power up with.  Hence check the
	 * power_on status before dumping DP registers, to avoid a crash
	 * due to unclocked access.
	 */
	mutex_lock(&dp_display->event_mutex);

	if (!dp->power_on) {
		mutex_unlock(&dp_display->event_mutex);
		return;
	}

	dp_catalog_snapshot(dp_display->catalog, disp_state);

	mutex_unlock(&dp_display->event_mutex);
}

void dp_display_set_psr(struct msm_dp *dp_display, bool enter)
{
	struct dp_display_private *dp;

	if (!dp_display) {
		DRM_ERROR("invalid params\n");
		return;
	}

	dp = container_of(dp_display, struct dp_display_private, dp_display);
	dp_ctrl_set_psr(dp->ctrl, enter);
}

static int hpd_event_thread(void *data)
{
	struct dp_display_private *dp_priv;
	unsigned long flag;
	struct dp_event *todo;
	int timeout_mode = 0;

	dp_priv = (struct dp_display_private *)data;

	while (1) {
		if (timeout_mode) {
			wait_event_timeout(dp_priv->event_q,
				(dp_priv->event_pndx == dp_priv->event_gndx) ||
					kthread_should_stop(), EVENT_TIMEOUT);
		} else {
			wait_event_interruptible(dp_priv->event_q,
				(dp_priv->event_pndx != dp_priv->event_gndx) ||
					kthread_should_stop());
		}

		if (kthread_should_stop())
			break;

		spin_lock_irqsave(&dp_priv->event_lock, flag);
		todo = &dp_priv->event_list[dp_priv->event_gndx];
		if (todo->delay) {
			struct dp_event *todo_next;

			dp_priv->event_gndx++;
			dp_priv->event_gndx %= DP_EVENT_Q_MAX;

			/* re enter delay event into q */
			todo_next = &dp_priv->event_list[dp_priv->event_pndx++];
			dp_priv->event_pndx %= DP_EVENT_Q_MAX;
			todo_next->event_id = todo->event_id;
			todo_next->data = todo->data;
			todo_next->delay = todo->delay - 1;

			/* clean up older event */
			todo->event_id = EV_NO_EVENT;
			todo->delay = 0;

			/* switch to timeout mode */
			timeout_mode = 1;
			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
			continue;
		}

		/* timeout with no events in q */
		if (dp_priv->event_pndx == dp_priv->event_gndx) {
			spin_unlock_irqrestore(&dp_priv->event_lock, flag);
			continue;
		}

		dp_priv->event_gndx++;
		dp_priv->event_gndx %= DP_EVENT_Q_MAX;
		timeout_mode = 0;
		spin_unlock_irqrestore(&dp_priv->event_lock, flag);

		switch (todo->event_id) {
		case EV_HPD_PLUG_INT:
			dp_hpd_plug_handle(dp_priv, todo->data);
			break;
		case EV_HPD_UNPLUG_INT:
			dp_hpd_unplug_handle(dp_priv, todo->data);
			break;
		case EV_IRQ_HPD_INT:
			dp_irq_hpd_handle(dp_priv, todo->data);
			break;
		case EV_USER_NOTIFICATION:
			dp_display_send_hpd_notification(dp_priv,
						todo->data);
			break;
		default:
			break;
		}
	}

	return 0;
}

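/*
 * Start the HPD event thread.  The producer/consumer indices are reset
 * first so that any events left over from a previous bind are dropped
 * before the new thread starts consuming the queue.
 */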
static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv)
{
	/* set event q to empty */
	dp_priv->event_gndx = 0;
	dp_priv->event_pndx = 0;

	dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler");
	if (IS_ERR(dp_priv->ev_tsk))
		return PTR_ERR(dp_priv->ev_tsk);

	return 0;
}

static irqreturn_t dp_display_irq_handler(int irq, void *dev_id)
{
	struct dp_display_private *dp = dev_id;
	irqreturn_t ret = IRQ_NONE;
	u32 hpd_isr_status;

	if (!dp) {
		DRM_ERROR("invalid data\n");
		return IRQ_NONE;
	}

	hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog);

	if (hpd_isr_status & 0x0F) {
		drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n",
			dp->dp_display.connector_type, hpd_isr_status);
		/* hpd related interrupts */
		if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK)
			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);

		if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK)
			dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0);

		if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) {
			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
			dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3);
		}

		if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK)
			dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);

		ret = IRQ_HANDLED;
	}

	/* DP controller isr */
	ret |= dp_ctrl_isr(dp->ctrl);

	/* DP aux isr */
	ret |= dp_aux_isr(dp->aux);

	return ret;
}

static int dp_display_request_irq(struct dp_display_private *dp)
{
	int rc = 0;
	struct platform_device *pdev = dp->dp_display.pdev;

	dp->irq = platform_get_irq(pdev, 0);
	if (dp->irq < 0) {
		DRM_ERROR("failed to get irq\n");
		return dp->irq;
	}

	rc = devm_request_irq(&pdev->dev, dp->irq, dp_display_irq_handler,
			      IRQF_TRIGGER_HIGH | IRQF_NO_AUTOEN,
			      "dp_display_isr", dp);

	if (rc < 0) {
		DRM_ERROR("failed to request IRQ%u: %d\n",
				dp->irq, rc);
		return rc;
	}

	return 0;
}

static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev)
{
	const struct msm_dp_desc *descs = of_device_get_match_data(&pdev->dev);
	struct resource *res;
	int i;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!res)
		return NULL;

	for (i = 0; i < descs[i].io_start; i++) {
		if (descs[i].io_start == res->start)
			return &descs[i];
	}

	dev_err(&pdev->dev, "unknown displayport instance\n");
	return NULL;
}

static int dp_display_probe_tail(struct device *dev)
{
	struct msm_dp *dp = dev_get_drvdata(dev);
	int ret;

	/*
	 * External bridges are mandatory for eDP interfaces: one has to
	 * provide at least an eDP panel (which gets wrapped into panel-bridge).
	 *
	 * For DisplayPort interfaces external bridges are optional, so
	 * silently ignore an error if one is not present (-ENODEV).
	 */
	dp->next_bridge = devm_drm_of_get_bridge(&dp->pdev->dev, dp->pdev->dev.of_node, 1, 0);
	if (IS_ERR(dp->next_bridge)) {
		ret = PTR_ERR(dp->next_bridge);
		dp->next_bridge = NULL;
		if (dp->is_edp || ret != -ENODEV)
			return ret;
	}

	ret = component_add(dev, &dp_display_comp_ops);
	if (ret)
		DRM_ERROR("component add failed, rc=%d\n", ret);

	return ret;
}

static int dp_auxbus_done_probe(struct drm_dp_aux *aux)
{
	return dp_display_probe_tail(aux->dev);
}

static int dp_display_probe(struct platform_device *pdev)
{
	int rc = 0;
	struct dp_display_private *dp;
	const struct msm_dp_desc *desc;

	if (!pdev || !pdev->dev.of_node) {
		DRM_ERROR("pdev not found\n");
		return -ENODEV;
	}

	dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL);
	if (!dp)
		return -ENOMEM;

	desc = dp_display_get_desc(pdev);
	if (!desc)
		return -EINVAL;

	dp->dp_display.pdev = pdev;
	dp->name = "drm_dp";
	dp->id = desc->id;
	dp->dp_display.connector_type = desc->connector_type;
	dp->wide_bus_supported = desc->wide_bus_supported;
	dp->dp_display.is_edp =
		(dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP);

	rc = dp_init_sub_modules(dp);
	if (rc) {
		DRM_ERROR("init sub module failed\n");
		return -EPROBE_DEFER;
	}

	/* setup event q */
	mutex_init(&dp->event_mutex);
	init_waitqueue_head(&dp->event_q);
	spin_lock_init(&dp->event_lock);

	/* Store DP audio handle inside DP display */
	dp->dp_display.dp_audio = dp->audio;

	init_completion(&dp->audio_comp);

	platform_set_drvdata(pdev, &dp->dp_display);

	rc = devm_pm_runtime_enable(&pdev->dev);
	if (rc)
		goto err;

	rc = dp_display_request_irq(dp);
	if (rc)
		goto err;

	if (dp->dp_display.is_edp) {
		rc = devm_of_dp_aux_populate_bus(dp->aux, dp_auxbus_done_probe);
		if (rc) {
			DRM_ERROR("eDP auxbus population failed, rc=%d\n", rc);
			goto err;
		}
	} else {
		rc = dp_display_probe_tail(&pdev->dev);
		if (rc)
			goto err;
	}

	return rc;

err:
	dp_display_deinit_sub_modules(dp);
	return rc;
}

static void dp_display_remove(struct platform_device *pdev)
{
	struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev);

	component_del(&pdev->dev, &dp_display_comp_ops);
	dp_display_deinit_sub_modules(dp);
	platform_set_drvdata(pdev, NULL);
}

static int dp_pm_runtime_suspend(struct device *dev)
{
	struct dp_display_private *dp = dev_get_dp_display_private(dev);

	disable_irq(dp->irq);

	if (dp->dp_display.is_edp) {
		dp_display_host_phy_exit(dp);
		dp_catalog_ctrl_hpd_disable(dp->catalog);
	}
	dp_display_host_deinit(dp);

	return 0;
}

static int dp_pm_runtime_resume(struct device *dev)
{
	struct dp_display_private *dp = dev_get_dp_display_private(dev);

	/*
	 * For eDP, the host controller, HPD block and PHY are enabled here,
	 * but with the HPD irq disabled.
	 *
	 * For DP, only the host controller is enabled here.
	 * The HPD block is enabled in dp_bridge_hpd_enable() and the PHY
	 * will be enabled later, in the plugin handler.
	 */
	dp_display_host_init(dp);
	if (dp->dp_display.is_edp) {
		dp_catalog_ctrl_hpd_enable(dp->catalog);
		dp_display_host_phy_init(dp);
	}

	enable_irq(dp->irq);
	return 0;
}

static const struct dev_pm_ops dp_pm_ops = {
	SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL)
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
};

static struct platform_driver dp_display_driver = {
	.probe = dp_display_probe,
	.remove_new = dp_display_remove,
	.driver = {
		.name = "msm-dp-display",
		.of_match_table = dp_dt_match,
		.suppress_bind_attrs = true,
		.pm = &dp_pm_ops,
	},
};

int __init msm_dp_register(void)
{
	int ret;

	ret = platform_driver_register(&dp_display_driver);
	if (ret)
		DRM_ERROR("DP display driver register failed\n");

	return ret;
}

void __exit msm_dp_unregister(void)
{
	platform_driver_unregister(&dp_display_driver);
}

bool msm_dp_is_yuv_420_enabled(const struct msm_dp *dp_display,
			       const struct drm_display_mode *mode)
{
	struct dp_display_private *dp;
	const struct drm_display_info *info;

	dp = container_of(dp_display, struct dp_display_private, dp_display);
	info = &dp_display->connector->display_info;

	return dp->panel->vsc_sdp_supported && drm_mode_is_420_only(info, mode);
}

bool msm_dp_needs_periph_flush(const struct msm_dp *dp_display,
			       const struct drm_display_mode *mode)
{
	return msm_dp_is_yuv_420_enabled(dp_display, mode);
}

bool msm_dp_wide_bus_available(const struct msm_dp *dp_display)
{
	struct dp_display_private *dp;

	dp = container_of(dp_display, struct dp_display_private, dp_display);

	if (dp->dp_mode.out_fmt_is_yuv_420)
		return false;

	return dp->wide_bus_supported;
}

void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp)
{
	struct dp_display_private *dp;
	struct device *dev;
	int rc;

	dp = container_of(dp_display, struct dp_display_private, dp_display);
	dev = &dp->dp_display.pdev->dev;

	dp->debug = dp_debug_get(dev, dp->panel,
					dp->link, dp->dp_display.connector,
					root, is_edp);
	if (IS_ERR(dp->debug)) {
		rc = PTR_ERR(dp->debug);
		DRM_ERROR("failed to initialize debug, rc = %d\n", rc);
		dp->debug = NULL;
	}
}

int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev,
			struct drm_encoder *encoder, bool yuv_supported)
{
	struct dp_display_private *dp_priv;
	int ret;

	dp_display->drm_dev = dev;

	dp_priv = container_of(dp_display, struct dp_display_private, dp_display);

	ret = dp_bridge_init(dp_display, dev, encoder);
	if (ret) {
		DRM_DEV_ERROR(dev->dev,
			"failed to create dp bridge: %d\n", ret);
		return ret;
	}

	dp_display->connector = dp_drm_connector_init(dp_display, encoder, yuv_supported);
	if (IS_ERR(dp_display->connector)) {
		ret = PTR_ERR(dp_display->connector);
		DRM_DEV_ERROR(dev->dev,
			"failed to create dp connector: %d\n", ret);
		dp_display->connector = NULL;
		return ret;
	}

	dp_priv->panel->connector = dp_display->connector;

	return 0;
}

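/*
 * Bridge atomic_enable: bring up the main link and start the video
 * stream.  For eDP the plug event is injected directly here since there
 * is no hotplug; for DP the link was already trained from the HPD plug
 * handler (ST_MAINLINK_READY), and a forced retrain is only requested
 * when coming back from ST_DISPLAY_OFF.
 */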
void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge,
			     struct drm_bridge_state *old_bridge_state)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
	struct msm_dp *dp = dp_bridge->dp_display;
	int rc = 0;
	struct dp_display_private *dp_display;
	u32 state;
	bool force_link_train = false;

	dp_display = container_of(dp, struct dp_display_private, dp_display);
	if (!dp_display->dp_mode.drm_mode.clock) {
		DRM_ERROR("invalid params\n");
		return;
	}

	if (dp->is_edp)
		dp_hpd_plug_handle(dp_display, 0);

	mutex_lock(&dp_display->event_mutex);
	if (pm_runtime_resume_and_get(&dp->pdev->dev)) {
		DRM_ERROR("failed to pm_runtime_resume\n");
		mutex_unlock(&dp_display->event_mutex);
		return;
	}

	state = dp_display->hpd_state;
	if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) {
		mutex_unlock(&dp_display->event_mutex);
		return;
	}

	rc = dp_display_set_mode(dp, &dp_display->dp_mode);
	if (rc) {
		DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc);
		mutex_unlock(&dp_display->event_mutex);
		return;
	}

	state = dp_display->hpd_state;

	if (state == ST_DISPLAY_OFF) {
		dp_display_host_phy_init(dp_display);
		force_link_train = true;
	}

	dp_display_enable(dp_display, force_link_train);

	rc = dp_display_post_enable(dp);
	if (rc) {
		DRM_ERROR("DP display post enable failed, rc=%d\n", rc);
		dp_display_disable(dp_display);
	}

	/* completed connection */
	dp_display->hpd_state = ST_CONNECTED;

	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);
	mutex_unlock(&dp_display->event_mutex);
}

void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge,
			      struct drm_bridge_state *old_bridge_state)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
	struct msm_dp *dp = dp_bridge->dp_display;
	struct dp_display_private *dp_display;

	dp_display = container_of(dp, struct dp_display_private, dp_display);

	dp_ctrl_push_idle(dp_display->ctrl);
}

void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge,
				   struct drm_bridge_state *old_bridge_state)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
	struct msm_dp *dp = dp_bridge->dp_display;
	u32 state;
	struct dp_display_private *dp_display;

	dp_display = container_of(dp, struct dp_display_private, dp_display);

	if (dp->is_edp)
		dp_hpd_unplug_handle(dp_display, 0);

	mutex_lock(&dp_display->event_mutex);

	state = dp_display->hpd_state;
	if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED)
		drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n",
			   dp->connector_type, state);

	dp_display_disable(dp_display);

	state = dp_display->hpd_state;
	if (state == ST_DISCONNECT_PENDING) {
		/* completed disconnection */
		dp_display->hpd_state = ST_DISCONNECTED;
	} else {
		dp_display->hpd_state = ST_DISPLAY_OFF;
	}

	drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type);

	pm_runtime_put_sync(&dp->pdev->dev);
	mutex_unlock(&dp_display->event_mutex);
}

void dp_bridge_mode_set(struct drm_bridge *drm_bridge,
			const struct drm_display_mode *mode,
			const struct drm_display_mode *adjusted_mode)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge);
	struct msm_dp *dp = dp_bridge->dp_display;
	struct dp_display_private *dp_display;
	struct dp_panel *dp_panel;

	dp_display = container_of(dp, struct dp_display_private, dp_display);
	dp_panel = dp_display->panel;

	memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode));

	if (dp_display_check_video_test(dp))
		dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp);
	else /* Default num_components per pixel = 3 */
		dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3;

	if (!dp_display->dp_mode.bpp)
		dp_display->dp_mode.bpp = 24; /* Default bpp */

	drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode);

	dp_display->dp_mode.v_active_low =
		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC);

	dp_display->dp_mode.h_active_low =
		!!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC);

	dp_display->dp_mode.out_fmt_is_yuv_420 =
		drm_mode_is_420_only(&dp->connector->display_info, adjusted_mode) &&
		dp_panel->vsc_sdp_supported;

	/* populate wide_bus_support to different layers */
	dp_display->ctrl->wide_bus_en =
		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
	dp_display->catalog->wide_bus_en =
		dp_display->dp_mode.out_fmt_is_yuv_420 ? false : dp_display->wide_bus_supported;
}

void dp_bridge_hpd_enable(struct drm_bridge *bridge)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
	struct msm_dp *dp_display = dp_bridge->dp_display;
	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);

	/*
	 * This is for the external DP with hpd irq enabled case:
	 * step-1: dp_pm_runtime_resume() enables the dp host only
	 * step-2: enable the hpd block and hpd irq here
	 * step-3: wait for the plugin irq while the phy is not initialized
	 * step-4: the DP PHY is initialized in the plugin handler, before
	 *         link training
	 */
	mutex_lock(&dp->event_mutex);
	if (pm_runtime_resume_and_get(&dp_display->pdev->dev)) {
		DRM_ERROR("failed to resume power\n");
		mutex_unlock(&dp->event_mutex);
		return;
	}

	dp_catalog_ctrl_hpd_enable(dp->catalog);

	/* enable HPD interrupts */
	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, true);

	dp_display->internal_hpd = true;
	mutex_unlock(&dp->event_mutex);
}

void dp_bridge_hpd_disable(struct drm_bridge *bridge)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
	struct msm_dp *dp_display = dp_bridge->dp_display;
	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);

	mutex_lock(&dp->event_mutex);
	/* disable HPD interrupts */
	dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false);
	dp_catalog_ctrl_hpd_disable(dp->catalog);

	dp_display->internal_hpd = false;

	pm_runtime_put_sync(&dp_display->pdev->dev);
	mutex_unlock(&dp->event_mutex);
}

void dp_bridge_hpd_notify(struct drm_bridge *bridge,
			  enum drm_connector_status status)
{
	struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge);
	struct msm_dp *dp_display = dp_bridge->dp_display;
	struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display);

	/* Without next_bridge interrupts are handled by the DP core directly */
	if (dp_display->internal_hpd)
		return;

	if (!dp_display->link_ready && status == connector_status_connected)
		dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0);
	else if (dp_display->link_ready && status == connector_status_disconnected)
		dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0);
}