1 // SPDX-License-Identifier: GPL-2.0-only 2 /* 3 * Copyright (c) 2017-2020, The Linux Foundation. All rights reserved. 4 */ 5 6 #include <linux/module.h> 7 #include <linux/slab.h> 8 #include <linux/uaccess.h> 9 #include <linux/debugfs.h> 10 #include <linux/component.h> 11 #include <linux/of_irq.h> 12 #include <linux/delay.h> 13 #include <drm/display/drm_dp_aux_bus.h> 14 #include <drm/drm_edid.h> 15 16 #include "msm_drv.h" 17 #include "msm_kms.h" 18 #include "dp_parser.h" 19 #include "dp_power.h" 20 #include "dp_catalog.h" 21 #include "dp_aux.h" 22 #include "dp_reg.h" 23 #include "dp_link.h" 24 #include "dp_panel.h" 25 #include "dp_ctrl.h" 26 #include "dp_display.h" 27 #include "dp_drm.h" 28 #include "dp_audio.h" 29 #include "dp_debug.h" 30 31 static bool psr_enabled = false; 32 module_param(psr_enabled, bool, 0); 33 MODULE_PARM_DESC(psr_enabled, "enable PSR for eDP and DP displays"); 34 35 #define HPD_STRING_SIZE 30 36 37 enum { 38 ISR_DISCONNECTED, 39 ISR_CONNECT_PENDING, 40 ISR_CONNECTED, 41 ISR_HPD_REPLUG_COUNT, 42 ISR_IRQ_HPD_PULSE_COUNT, 43 ISR_HPD_LO_GLITH_COUNT, 44 }; 45 46 /* event thread connection state */ 47 enum { 48 ST_DISCONNECTED, 49 ST_MAINLINK_READY, 50 ST_CONNECTED, 51 ST_DISCONNECT_PENDING, 52 ST_DISPLAY_OFF, 53 }; 54 55 enum { 56 EV_NO_EVENT, 57 /* hpd events */ 58 EV_HPD_PLUG_INT, 59 EV_IRQ_HPD_INT, 60 EV_HPD_UNPLUG_INT, 61 EV_USER_NOTIFICATION, 62 }; 63 64 #define EVENT_TIMEOUT (HZ/10) /* 100ms */ 65 #define DP_EVENT_Q_MAX 8 66 67 #define DP_TIMEOUT_NONE 0 68 69 #define WAIT_FOR_RESUME_TIMEOUT_JIFFIES (HZ / 2) 70 71 struct dp_event { 72 u32 event_id; 73 u32 data; 74 u32 delay; 75 }; 76 77 struct dp_display_private { 78 char *name; 79 int irq; 80 81 unsigned int id; 82 83 /* state variables */ 84 bool core_initialized; 85 bool phy_initialized; 86 bool hpd_irq_on; 87 bool audio_supported; 88 89 struct drm_device *drm_dev; 90 struct dentry *root; 91 92 struct dp_parser *parser; 93 struct dp_power *power; 94 struct dp_catalog *catalog; 95 struct drm_dp_aux *aux; 96 struct dp_link *link; 97 struct dp_panel *panel; 98 struct dp_ctrl *ctrl; 99 struct dp_debug *debug; 100 101 struct dp_display_mode dp_mode; 102 struct msm_dp dp_display; 103 104 /* wait for audio signaling */ 105 struct completion audio_comp; 106 107 /* event related only access by event thread */ 108 struct mutex event_mutex; 109 wait_queue_head_t event_q; 110 u32 hpd_state; 111 u32 event_pndx; 112 u32 event_gndx; 113 struct task_struct *ev_tsk; 114 struct dp_event event_list[DP_EVENT_Q_MAX]; 115 spinlock_t event_lock; 116 117 bool wide_bus_en; 118 119 struct dp_audio *audio; 120 }; 121 122 struct msm_dp_desc { 123 phys_addr_t io_start; 124 unsigned int id; 125 unsigned int connector_type; 126 bool wide_bus_en; 127 }; 128 129 static const struct msm_dp_desc sc7180_dp_descs[] = { 130 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, 131 {} 132 }; 133 134 static const struct msm_dp_desc sc7280_dp_descs[] = { 135 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 136 { .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_en = true }, 137 {} 138 }; 139 140 static const struct msm_dp_desc sc8180x_dp_descs[] = { 141 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, 142 { .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, 143 { .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP }, 144 {} 145 }; 146 147 static const struct msm_dp_desc sc8280xp_dp_descs[] = { 148 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 149 { .io_start = 0x0ae98000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 150 { .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 151 { .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 152 { .io_start = 0x22090000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 153 { .io_start = 0x22098000, .id = MSM_DP_CONTROLLER_1, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 154 { .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 155 { .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_DisplayPort, .wide_bus_en = true }, 156 {} 157 }; 158 159 static const struct msm_dp_desc sc8280xp_edp_descs[] = { 160 { .io_start = 0x0ae9a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_en = true }, 161 { .io_start = 0x0aea0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_en = true }, 162 { .io_start = 0x2209a000, .id = MSM_DP_CONTROLLER_2, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_en = true }, 163 { .io_start = 0x220a0000, .id = MSM_DP_CONTROLLER_3, .connector_type = DRM_MODE_CONNECTOR_eDP, .wide_bus_en = true }, 164 {} 165 }; 166 167 static const struct msm_dp_desc sm8350_dp_descs[] = { 168 { .io_start = 0x0ae90000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, 169 {} 170 }; 171 172 static const struct msm_dp_desc sm8650_dp_descs[] = { 173 { .io_start = 0x0af54000, .id = MSM_DP_CONTROLLER_0, .connector_type = DRM_MODE_CONNECTOR_DisplayPort }, 174 {} 175 }; 176 177 static const struct of_device_id dp_dt_match[] = { 178 { .compatible = "qcom,sc7180-dp", .data = &sc7180_dp_descs }, 179 { .compatible = "qcom,sc7280-dp", .data = &sc7280_dp_descs }, 180 { .compatible = "qcom,sc7280-edp", .data = &sc7280_dp_descs }, 181 { .compatible = "qcom,sc8180x-dp", .data = &sc8180x_dp_descs }, 182 { .compatible = "qcom,sc8180x-edp", .data = &sc8180x_dp_descs }, 183 { .compatible = "qcom,sc8280xp-dp", .data = &sc8280xp_dp_descs }, 184 { .compatible = "qcom,sc8280xp-edp", .data = &sc8280xp_edp_descs }, 185 { .compatible = "qcom,sdm845-dp", .data = &sc7180_dp_descs }, 186 { .compatible = "qcom,sm8350-dp", .data = &sm8350_dp_descs }, 187 { .compatible = "qcom,sm8650-dp", .data = &sm8650_dp_descs }, 188 {} 189 }; 190 191 static struct dp_display_private *dev_get_dp_display_private(struct device *dev) 192 { 193 struct msm_dp *dp = dev_get_drvdata(dev); 194 195 return container_of(dp, struct dp_display_private, dp_display); 196 } 197 198 static int dp_add_event(struct dp_display_private *dp_priv, u32 event, 199 u32 data, u32 delay) 200 { 201 unsigned long flag; 202 struct dp_event *todo; 203 int pndx; 204 205 spin_lock_irqsave(&dp_priv->event_lock, flag); 206 pndx = dp_priv->event_pndx + 1; 207 pndx %= DP_EVENT_Q_MAX; 208 if (pndx == dp_priv->event_gndx) { 209 pr_err("event_q is full: pndx=%d gndx=%d\n", 210 dp_priv->event_pndx, dp_priv->event_gndx); 211 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 212 return -EPERM; 213 } 214 todo = &dp_priv->event_list[dp_priv->event_pndx++]; 215 dp_priv->event_pndx %= DP_EVENT_Q_MAX; 216 todo->event_id = event; 217 todo->data = data; 218 todo->delay = delay; 219 wake_up(&dp_priv->event_q); 220 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 221 222 return 0; 223 } 224 225 static int dp_del_event(struct dp_display_private *dp_priv, u32 event) 226 { 227 unsigned long flag; 228 struct dp_event *todo; 229 u32 gndx; 230 231 spin_lock_irqsave(&dp_priv->event_lock, flag); 232 if (dp_priv->event_pndx == dp_priv->event_gndx) { 233 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 234 return -ENOENT; 235 } 236 237 gndx = dp_priv->event_gndx; 238 while (dp_priv->event_pndx != gndx) { 239 todo = &dp_priv->event_list[gndx]; 240 if (todo->event_id == event) { 241 todo->event_id = EV_NO_EVENT; /* deleted */ 242 todo->delay = 0; 243 } 244 gndx++; 245 gndx %= DP_EVENT_Q_MAX; 246 } 247 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 248 249 return 0; 250 } 251 252 void dp_display_signal_audio_start(struct msm_dp *dp_display) 253 { 254 struct dp_display_private *dp; 255 256 dp = container_of(dp_display, struct dp_display_private, dp_display); 257 258 reinit_completion(&dp->audio_comp); 259 } 260 261 void dp_display_signal_audio_complete(struct msm_dp *dp_display) 262 { 263 struct dp_display_private *dp; 264 265 dp = container_of(dp_display, struct dp_display_private, dp_display); 266 267 complete_all(&dp->audio_comp); 268 } 269 270 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv); 271 272 static int dp_display_bind(struct device *dev, struct device *master, 273 void *data) 274 { 275 int rc = 0; 276 struct dp_display_private *dp = dev_get_dp_display_private(dev); 277 struct msm_drm_private *priv = dev_get_drvdata(master); 278 struct drm_device *drm = priv->dev; 279 280 dp->dp_display.drm_dev = drm; 281 priv->dp[dp->id] = &dp->dp_display; 282 283 284 285 dp->drm_dev = drm; 286 dp->aux->drm_dev = drm; 287 rc = dp_aux_register(dp->aux); 288 if (rc) { 289 DRM_ERROR("DRM DP AUX register failed\n"); 290 goto end; 291 } 292 293 294 rc = dp_register_audio_driver(dev, dp->audio); 295 if (rc) { 296 DRM_ERROR("Audio registration Dp failed\n"); 297 goto end; 298 } 299 300 rc = dp_hpd_event_thread_start(dp); 301 if (rc) { 302 DRM_ERROR("Event thread create failed\n"); 303 goto end; 304 } 305 306 return 0; 307 end: 308 return rc; 309 } 310 311 static void dp_display_unbind(struct device *dev, struct device *master, 312 void *data) 313 { 314 struct dp_display_private *dp = dev_get_dp_display_private(dev); 315 struct msm_drm_private *priv = dev_get_drvdata(master); 316 317 kthread_stop(dp->ev_tsk); 318 319 of_dp_aux_depopulate_bus(dp->aux); 320 321 dp_unregister_audio_driver(dev, dp->audio); 322 dp_aux_unregister(dp->aux); 323 dp->drm_dev = NULL; 324 dp->aux->drm_dev = NULL; 325 priv->dp[dp->id] = NULL; 326 } 327 328 static const struct component_ops dp_display_comp_ops = { 329 .bind = dp_display_bind, 330 .unbind = dp_display_unbind, 331 }; 332 333 static int dp_display_send_hpd_notification(struct dp_display_private *dp, 334 bool hpd) 335 { 336 struct drm_bridge *bridge = dp->dp_display.bridge; 337 338 /* reset video pattern flag on disconnect */ 339 if (!hpd) { 340 dp->panel->video_test = false; 341 if (!dp->dp_display.is_edp) 342 drm_dp_set_subconnector_property(dp->dp_display.connector, 343 connector_status_disconnected, 344 dp->panel->dpcd, 345 dp->panel->downstream_ports); 346 } 347 348 dp->dp_display.link_ready = hpd; 349 350 drm_dbg_dp(dp->drm_dev, "type=%d hpd=%d\n", 351 dp->dp_display.connector_type, hpd); 352 drm_bridge_hpd_notify(bridge, dp->dp_display.link_ready); 353 354 return 0; 355 } 356 357 static int dp_display_process_hpd_high(struct dp_display_private *dp) 358 { 359 int rc = 0; 360 struct edid *edid; 361 362 dp->panel->max_dp_lanes = dp->parser->max_dp_lanes; 363 dp->panel->max_dp_link_rate = dp->parser->max_dp_link_rate; 364 365 drm_dbg_dp(dp->drm_dev, "max_lanes=%d max_link_rate=%d\n", 366 dp->panel->max_dp_lanes, dp->panel->max_dp_link_rate); 367 368 rc = dp_panel_read_sink_caps(dp->panel, dp->dp_display.connector); 369 if (rc) 370 goto end; 371 372 dp_link_process_request(dp->link); 373 374 if (!dp->dp_display.is_edp) 375 drm_dp_set_subconnector_property(dp->dp_display.connector, 376 connector_status_connected, 377 dp->panel->dpcd, 378 dp->panel->downstream_ports); 379 380 edid = dp->panel->edid; 381 382 dp->dp_display.psr_supported = dp->panel->psr_cap.version && psr_enabled; 383 384 dp->audio_supported = drm_detect_monitor_audio(edid); 385 dp_panel_handle_sink_request(dp->panel); 386 387 dp->dp_display.max_dp_lanes = dp->parser->max_dp_lanes; 388 389 /* 390 * set sink to normal operation mode -- D0 391 * before dpcd read 392 */ 393 dp_link_psm_config(dp->link, &dp->panel->link_info, false); 394 395 dp_link_reset_phy_params_vx_px(dp->link); 396 rc = dp_ctrl_on_link(dp->ctrl); 397 if (rc) { 398 DRM_ERROR("failed to complete DP link training\n"); 399 goto end; 400 } 401 402 dp_add_event(dp, EV_USER_NOTIFICATION, true, 0); 403 404 end: 405 return rc; 406 } 407 408 static void dp_display_host_phy_init(struct dp_display_private *dp) 409 { 410 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", 411 dp->dp_display.connector_type, dp->core_initialized, 412 dp->phy_initialized); 413 414 if (!dp->phy_initialized) { 415 dp_ctrl_phy_init(dp->ctrl); 416 dp->phy_initialized = true; 417 } 418 } 419 420 static void dp_display_host_phy_exit(struct dp_display_private *dp) 421 { 422 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", 423 dp->dp_display.connector_type, dp->core_initialized, 424 dp->phy_initialized); 425 426 if (dp->phy_initialized) { 427 dp_ctrl_phy_exit(dp->ctrl); 428 dp->phy_initialized = false; 429 } 430 } 431 432 static void dp_display_host_init(struct dp_display_private *dp) 433 { 434 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", 435 dp->dp_display.connector_type, dp->core_initialized, 436 dp->phy_initialized); 437 438 dp_power_init(dp->power); 439 dp_ctrl_reset_irq_ctrl(dp->ctrl, true); 440 dp_aux_init(dp->aux); 441 dp->core_initialized = true; 442 } 443 444 static void dp_display_host_deinit(struct dp_display_private *dp) 445 { 446 drm_dbg_dp(dp->drm_dev, "type=%d core_init=%d phy_init=%d\n", 447 dp->dp_display.connector_type, dp->core_initialized, 448 dp->phy_initialized); 449 450 dp_ctrl_reset_irq_ctrl(dp->ctrl, false); 451 dp_aux_deinit(dp->aux); 452 dp_power_deinit(dp->power); 453 dp->core_initialized = false; 454 } 455 456 static int dp_display_usbpd_configure_cb(struct device *dev) 457 { 458 struct dp_display_private *dp = dev_get_dp_display_private(dev); 459 460 dp_display_host_phy_init(dp); 461 462 return dp_display_process_hpd_high(dp); 463 } 464 465 static int dp_display_notify_disconnect(struct device *dev) 466 { 467 struct dp_display_private *dp = dev_get_dp_display_private(dev); 468 469 dp_add_event(dp, EV_USER_NOTIFICATION, false, 0); 470 471 return 0; 472 } 473 474 static void dp_display_handle_video_request(struct dp_display_private *dp) 475 { 476 if (dp->link->sink_request & DP_TEST_LINK_VIDEO_PATTERN) { 477 dp->panel->video_test = true; 478 dp_link_send_test_response(dp->link); 479 } 480 } 481 482 static int dp_display_handle_port_ststus_changed(struct dp_display_private *dp) 483 { 484 int rc = 0; 485 486 if (drm_dp_is_branch(dp->panel->dpcd) && dp->link->sink_count == 0) { 487 drm_dbg_dp(dp->drm_dev, "sink count is zero, nothing to do\n"); 488 if (dp->hpd_state != ST_DISCONNECTED) { 489 dp->hpd_state = ST_DISCONNECT_PENDING; 490 dp_add_event(dp, EV_USER_NOTIFICATION, false, 0); 491 } 492 } else { 493 if (dp->hpd_state == ST_DISCONNECTED) { 494 dp->hpd_state = ST_MAINLINK_READY; 495 rc = dp_display_process_hpd_high(dp); 496 if (rc) 497 dp->hpd_state = ST_DISCONNECTED; 498 } 499 } 500 501 return rc; 502 } 503 504 static int dp_display_handle_irq_hpd(struct dp_display_private *dp) 505 { 506 u32 sink_request = dp->link->sink_request; 507 508 drm_dbg_dp(dp->drm_dev, "%d\n", sink_request); 509 if (dp->hpd_state == ST_DISCONNECTED) { 510 if (sink_request & DP_LINK_STATUS_UPDATED) { 511 drm_dbg_dp(dp->drm_dev, "Disconnected sink_request: %d\n", 512 sink_request); 513 DRM_ERROR("Disconnected, no DP_LINK_STATUS_UPDATED\n"); 514 return -EINVAL; 515 } 516 } 517 518 dp_ctrl_handle_sink_request(dp->ctrl); 519 520 if (sink_request & DP_TEST_LINK_VIDEO_PATTERN) 521 dp_display_handle_video_request(dp); 522 523 return 0; 524 } 525 526 static int dp_display_usbpd_attention_cb(struct device *dev) 527 { 528 int rc = 0; 529 u32 sink_request; 530 struct dp_display_private *dp = dev_get_dp_display_private(dev); 531 532 /* check for any test request issued by sink */ 533 rc = dp_link_process_request(dp->link); 534 if (!rc) { 535 sink_request = dp->link->sink_request; 536 drm_dbg_dp(dp->drm_dev, "hpd_state=%d sink_request=%d\n", 537 dp->hpd_state, sink_request); 538 if (sink_request & DS_PORT_STATUS_CHANGED) 539 rc = dp_display_handle_port_ststus_changed(dp); 540 else 541 rc = dp_display_handle_irq_hpd(dp); 542 } 543 544 return rc; 545 } 546 547 static int dp_hpd_plug_handle(struct dp_display_private *dp, u32 data) 548 { 549 u32 state; 550 int ret; 551 struct platform_device *pdev = dp->dp_display.pdev; 552 553 mutex_lock(&dp->event_mutex); 554 555 state = dp->hpd_state; 556 drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n", 557 dp->dp_display.connector_type, state); 558 559 if (state == ST_DISPLAY_OFF) { 560 mutex_unlock(&dp->event_mutex); 561 return 0; 562 } 563 564 if (state == ST_MAINLINK_READY || state == ST_CONNECTED) { 565 mutex_unlock(&dp->event_mutex); 566 return 0; 567 } 568 569 if (state == ST_DISCONNECT_PENDING) { 570 /* wait until ST_DISCONNECTED */ 571 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 1); /* delay = 1 */ 572 mutex_unlock(&dp->event_mutex); 573 return 0; 574 } 575 576 ret = pm_runtime_resume_and_get(&pdev->dev); 577 if (ret) { 578 DRM_ERROR("failed to pm_runtime_resume\n"); 579 mutex_unlock(&dp->event_mutex); 580 return ret; 581 } 582 583 ret = dp_display_usbpd_configure_cb(&pdev->dev); 584 if (ret) { /* link train failed */ 585 dp->hpd_state = ST_DISCONNECTED; 586 } else { 587 dp->hpd_state = ST_MAINLINK_READY; 588 } 589 590 drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n", 591 dp->dp_display.connector_type, state); 592 mutex_unlock(&dp->event_mutex); 593 594 /* uevent will complete connection part */ 595 return 0; 596 }; 597 598 static void dp_display_handle_plugged_change(struct msm_dp *dp_display, 599 bool plugged) 600 { 601 struct dp_display_private *dp; 602 603 dp = container_of(dp_display, 604 struct dp_display_private, dp_display); 605 606 /* notify audio subsystem only if sink supports audio */ 607 if (dp_display->plugged_cb && dp_display->codec_dev && 608 dp->audio_supported) 609 dp_display->plugged_cb(dp_display->codec_dev, plugged); 610 } 611 612 static int dp_hpd_unplug_handle(struct dp_display_private *dp, u32 data) 613 { 614 u32 state; 615 struct platform_device *pdev = dp->dp_display.pdev; 616 617 mutex_lock(&dp->event_mutex); 618 619 state = dp->hpd_state; 620 621 drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n", 622 dp->dp_display.connector_type, state); 623 624 /* unplugged, no more irq_hpd handle */ 625 dp_del_event(dp, EV_IRQ_HPD_INT); 626 627 if (state == ST_DISCONNECTED) { 628 /* triggered by irq_hdp with sink_count = 0 */ 629 if (dp->link->sink_count == 0) { 630 dp_display_host_phy_exit(dp); 631 } 632 dp_display_notify_disconnect(&dp->dp_display.pdev->dev); 633 mutex_unlock(&dp->event_mutex); 634 return 0; 635 } else if (state == ST_DISCONNECT_PENDING) { 636 mutex_unlock(&dp->event_mutex); 637 return 0; 638 } else if (state == ST_MAINLINK_READY) { 639 dp_ctrl_off_link(dp->ctrl); 640 dp_display_host_phy_exit(dp); 641 dp->hpd_state = ST_DISCONNECTED; 642 dp_display_notify_disconnect(&dp->dp_display.pdev->dev); 643 mutex_unlock(&dp->event_mutex); 644 return 0; 645 } 646 647 /* 648 * We don't need separate work for disconnect as 649 * connect/attention interrupts are disabled 650 */ 651 dp_display_notify_disconnect(&dp->dp_display.pdev->dev); 652 653 if (state == ST_DISPLAY_OFF) { 654 dp->hpd_state = ST_DISCONNECTED; 655 } else { 656 dp->hpd_state = ST_DISCONNECT_PENDING; 657 } 658 659 /* signal the disconnect event early to ensure proper teardown */ 660 dp_display_handle_plugged_change(&dp->dp_display, false); 661 662 drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n", 663 dp->dp_display.connector_type, state); 664 665 /* uevent will complete disconnection part */ 666 pm_runtime_put_sync(&pdev->dev); 667 mutex_unlock(&dp->event_mutex); 668 return 0; 669 } 670 671 static int dp_irq_hpd_handle(struct dp_display_private *dp, u32 data) 672 { 673 u32 state; 674 675 mutex_lock(&dp->event_mutex); 676 677 /* irq_hpd can happen at either connected or disconnected state */ 678 state = dp->hpd_state; 679 drm_dbg_dp(dp->drm_dev, "Before, type=%d hpd_state=%d\n", 680 dp->dp_display.connector_type, state); 681 682 if (state == ST_DISPLAY_OFF) { 683 mutex_unlock(&dp->event_mutex); 684 return 0; 685 } 686 687 if (state == ST_MAINLINK_READY || state == ST_DISCONNECT_PENDING) { 688 /* wait until ST_CONNECTED */ 689 dp_add_event(dp, EV_IRQ_HPD_INT, 0, 1); /* delay = 1 */ 690 mutex_unlock(&dp->event_mutex); 691 return 0; 692 } 693 694 dp_display_usbpd_attention_cb(&dp->dp_display.pdev->dev); 695 696 drm_dbg_dp(dp->drm_dev, "After, type=%d hpd_state=%d\n", 697 dp->dp_display.connector_type, state); 698 699 mutex_unlock(&dp->event_mutex); 700 701 return 0; 702 } 703 704 static void dp_display_deinit_sub_modules(struct dp_display_private *dp) 705 { 706 dp_audio_put(dp->audio); 707 dp_panel_put(dp->panel); 708 dp_aux_put(dp->aux); 709 } 710 711 static int dp_init_sub_modules(struct dp_display_private *dp) 712 { 713 int rc = 0; 714 struct device *dev = &dp->dp_display.pdev->dev; 715 struct dp_panel_in panel_in = { 716 .dev = dev, 717 }; 718 719 dp->parser = dp_parser_get(dp->dp_display.pdev); 720 if (IS_ERR(dp->parser)) { 721 rc = PTR_ERR(dp->parser); 722 DRM_ERROR("failed to initialize parser, rc = %d\n", rc); 723 dp->parser = NULL; 724 goto error; 725 } 726 727 dp->catalog = dp_catalog_get(dev, &dp->parser->io); 728 if (IS_ERR(dp->catalog)) { 729 rc = PTR_ERR(dp->catalog); 730 DRM_ERROR("failed to initialize catalog, rc = %d\n", rc); 731 dp->catalog = NULL; 732 goto error; 733 } 734 735 dp->power = dp_power_get(dev, dp->parser); 736 if (IS_ERR(dp->power)) { 737 rc = PTR_ERR(dp->power); 738 DRM_ERROR("failed to initialize power, rc = %d\n", rc); 739 dp->power = NULL; 740 goto error; 741 } 742 743 dp->aux = dp_aux_get(dev, dp->catalog, dp->dp_display.is_edp); 744 if (IS_ERR(dp->aux)) { 745 rc = PTR_ERR(dp->aux); 746 DRM_ERROR("failed to initialize aux, rc = %d\n", rc); 747 dp->aux = NULL; 748 goto error; 749 } 750 751 dp->link = dp_link_get(dev, dp->aux); 752 if (IS_ERR(dp->link)) { 753 rc = PTR_ERR(dp->link); 754 DRM_ERROR("failed to initialize link, rc = %d\n", rc); 755 dp->link = NULL; 756 goto error_link; 757 } 758 759 panel_in.aux = dp->aux; 760 panel_in.catalog = dp->catalog; 761 panel_in.link = dp->link; 762 763 dp->panel = dp_panel_get(&panel_in); 764 if (IS_ERR(dp->panel)) { 765 rc = PTR_ERR(dp->panel); 766 DRM_ERROR("failed to initialize panel, rc = %d\n", rc); 767 dp->panel = NULL; 768 goto error_link; 769 } 770 771 dp->ctrl = dp_ctrl_get(dev, dp->link, dp->panel, dp->aux, 772 dp->power, dp->catalog, dp->parser); 773 if (IS_ERR(dp->ctrl)) { 774 rc = PTR_ERR(dp->ctrl); 775 DRM_ERROR("failed to initialize ctrl, rc = %d\n", rc); 776 dp->ctrl = NULL; 777 goto error_ctrl; 778 } 779 780 dp->audio = dp_audio_get(dp->dp_display.pdev, dp->panel, dp->catalog); 781 if (IS_ERR(dp->audio)) { 782 rc = PTR_ERR(dp->audio); 783 pr_err("failed to initialize audio, rc = %d\n", rc); 784 dp->audio = NULL; 785 goto error_ctrl; 786 } 787 788 /* populate wide_bus_en to differernt layers */ 789 dp->ctrl->wide_bus_en = dp->wide_bus_en; 790 dp->catalog->wide_bus_en = dp->wide_bus_en; 791 792 return rc; 793 794 error_ctrl: 795 dp_panel_put(dp->panel); 796 error_link: 797 dp_aux_put(dp->aux); 798 error: 799 return rc; 800 } 801 802 static int dp_display_set_mode(struct msm_dp *dp_display, 803 struct dp_display_mode *mode) 804 { 805 struct dp_display_private *dp; 806 807 dp = container_of(dp_display, struct dp_display_private, dp_display); 808 809 drm_mode_copy(&dp->panel->dp_mode.drm_mode, &mode->drm_mode); 810 dp->panel->dp_mode.bpp = mode->bpp; 811 dp->panel->dp_mode.capabilities = mode->capabilities; 812 dp_panel_init_panel_info(dp->panel); 813 return 0; 814 } 815 816 static int dp_display_enable(struct dp_display_private *dp, bool force_link_train) 817 { 818 int rc = 0; 819 struct msm_dp *dp_display = &dp->dp_display; 820 821 drm_dbg_dp(dp->drm_dev, "sink_count=%d\n", dp->link->sink_count); 822 if (dp_display->power_on) { 823 drm_dbg_dp(dp->drm_dev, "Link already setup, return\n"); 824 return 0; 825 } 826 827 rc = dp_ctrl_on_stream(dp->ctrl, force_link_train); 828 if (!rc) 829 dp_display->power_on = true; 830 831 return rc; 832 } 833 834 static int dp_display_post_enable(struct msm_dp *dp_display) 835 { 836 struct dp_display_private *dp; 837 u32 rate; 838 839 dp = container_of(dp_display, struct dp_display_private, dp_display); 840 841 rate = dp->link->link_params.rate; 842 843 if (dp->audio_supported) { 844 dp->audio->bw_code = drm_dp_link_rate_to_bw_code(rate); 845 dp->audio->lane_count = dp->link->link_params.num_lanes; 846 } 847 848 /* signal the connect event late to synchronize video and display */ 849 dp_display_handle_plugged_change(dp_display, true); 850 851 if (dp_display->psr_supported) 852 dp_ctrl_config_psr(dp->ctrl); 853 854 return 0; 855 } 856 857 static int dp_display_disable(struct dp_display_private *dp) 858 { 859 struct msm_dp *dp_display = &dp->dp_display; 860 861 if (!dp_display->power_on) 862 return 0; 863 864 /* wait only if audio was enabled */ 865 if (dp_display->audio_enabled) { 866 /* signal the disconnect event */ 867 dp_display_handle_plugged_change(dp_display, false); 868 if (!wait_for_completion_timeout(&dp->audio_comp, 869 HZ * 5)) 870 DRM_ERROR("audio comp timeout\n"); 871 } 872 873 dp_display->audio_enabled = false; 874 875 if (dp->link->sink_count == 0) { 876 /* 877 * irq_hpd with sink_count = 0 878 * hdmi unplugged out of dongle 879 */ 880 dp_ctrl_off_link_stream(dp->ctrl); 881 } else { 882 /* 883 * unplugged interrupt 884 * dongle unplugged out of DUT 885 */ 886 dp_ctrl_off(dp->ctrl); 887 dp_display_host_phy_exit(dp); 888 } 889 890 dp_display->power_on = false; 891 892 drm_dbg_dp(dp->drm_dev, "sink count: %d\n", dp->link->sink_count); 893 return 0; 894 } 895 896 int dp_display_set_plugged_cb(struct msm_dp *dp_display, 897 hdmi_codec_plugged_cb fn, struct device *codec_dev) 898 { 899 bool plugged; 900 901 dp_display->plugged_cb = fn; 902 dp_display->codec_dev = codec_dev; 903 plugged = dp_display->link_ready; 904 dp_display_handle_plugged_change(dp_display, plugged); 905 906 return 0; 907 } 908 909 /** 910 * dp_bridge_mode_valid - callback to determine if specified mode is valid 911 * @bridge: Pointer to drm bridge structure 912 * @info: display info 913 * @mode: Pointer to drm mode structure 914 * Returns: Validity status for specified mode 915 */ 916 enum drm_mode_status dp_bridge_mode_valid(struct drm_bridge *bridge, 917 const struct drm_display_info *info, 918 const struct drm_display_mode *mode) 919 { 920 const u32 num_components = 3, default_bpp = 24; 921 struct dp_display_private *dp_display; 922 struct dp_link_info *link_info; 923 u32 mode_rate_khz = 0, supported_rate_khz = 0, mode_bpp = 0; 924 struct msm_dp *dp; 925 int mode_pclk_khz = mode->clock; 926 927 dp = to_dp_bridge(bridge)->dp_display; 928 929 if (!dp || !mode_pclk_khz || !dp->connector) { 930 DRM_ERROR("invalid params\n"); 931 return -EINVAL; 932 } 933 934 if (mode->clock > DP_MAX_PIXEL_CLK_KHZ) 935 return MODE_CLOCK_HIGH; 936 937 dp_display = container_of(dp, struct dp_display_private, dp_display); 938 link_info = &dp_display->panel->link_info; 939 940 mode_bpp = dp->connector->display_info.bpc * num_components; 941 if (!mode_bpp) 942 mode_bpp = default_bpp; 943 944 mode_bpp = dp_panel_get_mode_bpp(dp_display->panel, 945 mode_bpp, mode_pclk_khz); 946 947 mode_rate_khz = mode_pclk_khz * mode_bpp; 948 supported_rate_khz = link_info->num_lanes * link_info->rate * 8; 949 950 if (mode_rate_khz > supported_rate_khz) 951 return MODE_BAD; 952 953 return MODE_OK; 954 } 955 956 int dp_display_get_modes(struct msm_dp *dp) 957 { 958 struct dp_display_private *dp_display; 959 960 if (!dp) { 961 DRM_ERROR("invalid params\n"); 962 return 0; 963 } 964 965 dp_display = container_of(dp, struct dp_display_private, dp_display); 966 967 return dp_panel_get_modes(dp_display->panel, 968 dp->connector); 969 } 970 971 bool dp_display_check_video_test(struct msm_dp *dp) 972 { 973 struct dp_display_private *dp_display; 974 975 dp_display = container_of(dp, struct dp_display_private, dp_display); 976 977 return dp_display->panel->video_test; 978 } 979 980 int dp_display_get_test_bpp(struct msm_dp *dp) 981 { 982 struct dp_display_private *dp_display; 983 984 if (!dp) { 985 DRM_ERROR("invalid params\n"); 986 return 0; 987 } 988 989 dp_display = container_of(dp, struct dp_display_private, dp_display); 990 991 return dp_link_bit_depth_to_bpp( 992 dp_display->link->test_video.test_bit_depth); 993 } 994 995 void msm_dp_snapshot(struct msm_disp_state *disp_state, struct msm_dp *dp) 996 { 997 struct dp_display_private *dp_display; 998 999 dp_display = container_of(dp, struct dp_display_private, dp_display); 1000 1001 /* 1002 * if we are reading registers we need the link clocks to be on 1003 * however till DP cable is connected this will not happen as we 1004 * do not know the resolution to power up with. Hence check the 1005 * power_on status before dumping DP registers to avoid crash due 1006 * to unclocked access 1007 */ 1008 mutex_lock(&dp_display->event_mutex); 1009 1010 if (!dp->power_on) { 1011 mutex_unlock(&dp_display->event_mutex); 1012 return; 1013 } 1014 1015 dp_catalog_snapshot(dp_display->catalog, disp_state); 1016 1017 mutex_unlock(&dp_display->event_mutex); 1018 } 1019 1020 void dp_display_set_psr(struct msm_dp *dp_display, bool enter) 1021 { 1022 struct dp_display_private *dp; 1023 1024 if (!dp_display) { 1025 DRM_ERROR("invalid params\n"); 1026 return; 1027 } 1028 1029 dp = container_of(dp_display, struct dp_display_private, dp_display); 1030 dp_ctrl_set_psr(dp->ctrl, enter); 1031 } 1032 1033 static int hpd_event_thread(void *data) 1034 { 1035 struct dp_display_private *dp_priv; 1036 unsigned long flag; 1037 struct dp_event *todo; 1038 int timeout_mode = 0; 1039 1040 dp_priv = (struct dp_display_private *)data; 1041 1042 while (1) { 1043 if (timeout_mode) { 1044 wait_event_timeout(dp_priv->event_q, 1045 (dp_priv->event_pndx == dp_priv->event_gndx) || 1046 kthread_should_stop(), EVENT_TIMEOUT); 1047 } else { 1048 wait_event_interruptible(dp_priv->event_q, 1049 (dp_priv->event_pndx != dp_priv->event_gndx) || 1050 kthread_should_stop()); 1051 } 1052 1053 if (kthread_should_stop()) 1054 break; 1055 1056 spin_lock_irqsave(&dp_priv->event_lock, flag); 1057 todo = &dp_priv->event_list[dp_priv->event_gndx]; 1058 if (todo->delay) { 1059 struct dp_event *todo_next; 1060 1061 dp_priv->event_gndx++; 1062 dp_priv->event_gndx %= DP_EVENT_Q_MAX; 1063 1064 /* re enter delay event into q */ 1065 todo_next = &dp_priv->event_list[dp_priv->event_pndx++]; 1066 dp_priv->event_pndx %= DP_EVENT_Q_MAX; 1067 todo_next->event_id = todo->event_id; 1068 todo_next->data = todo->data; 1069 todo_next->delay = todo->delay - 1; 1070 1071 /* clean up older event */ 1072 todo->event_id = EV_NO_EVENT; 1073 todo->delay = 0; 1074 1075 /* switch to timeout mode */ 1076 timeout_mode = 1; 1077 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 1078 continue; 1079 } 1080 1081 /* timeout with no events in q */ 1082 if (dp_priv->event_pndx == dp_priv->event_gndx) { 1083 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 1084 continue; 1085 } 1086 1087 dp_priv->event_gndx++; 1088 dp_priv->event_gndx %= DP_EVENT_Q_MAX; 1089 timeout_mode = 0; 1090 spin_unlock_irqrestore(&dp_priv->event_lock, flag); 1091 1092 switch (todo->event_id) { 1093 case EV_HPD_PLUG_INT: 1094 dp_hpd_plug_handle(dp_priv, todo->data); 1095 break; 1096 case EV_HPD_UNPLUG_INT: 1097 dp_hpd_unplug_handle(dp_priv, todo->data); 1098 break; 1099 case EV_IRQ_HPD_INT: 1100 dp_irq_hpd_handle(dp_priv, todo->data); 1101 break; 1102 case EV_USER_NOTIFICATION: 1103 dp_display_send_hpd_notification(dp_priv, 1104 todo->data); 1105 break; 1106 default: 1107 break; 1108 } 1109 } 1110 1111 return 0; 1112 } 1113 1114 static int dp_hpd_event_thread_start(struct dp_display_private *dp_priv) 1115 { 1116 /* set event q to empty */ 1117 dp_priv->event_gndx = 0; 1118 dp_priv->event_pndx = 0; 1119 1120 dp_priv->ev_tsk = kthread_run(hpd_event_thread, dp_priv, "dp_hpd_handler"); 1121 if (IS_ERR(dp_priv->ev_tsk)) 1122 return PTR_ERR(dp_priv->ev_tsk); 1123 1124 return 0; 1125 } 1126 1127 static irqreturn_t dp_display_irq_handler(int irq, void *dev_id) 1128 { 1129 struct dp_display_private *dp = dev_id; 1130 irqreturn_t ret = IRQ_NONE; 1131 u32 hpd_isr_status; 1132 1133 if (!dp) { 1134 DRM_ERROR("invalid data\n"); 1135 return IRQ_NONE; 1136 } 1137 1138 hpd_isr_status = dp_catalog_hpd_get_intr_status(dp->catalog); 1139 1140 if (hpd_isr_status & 0x0F) { 1141 drm_dbg_dp(dp->drm_dev, "type=%d isr=0x%x\n", 1142 dp->dp_display.connector_type, hpd_isr_status); 1143 /* hpd related interrupts */ 1144 if (hpd_isr_status & DP_DP_HPD_PLUG_INT_MASK) 1145 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0); 1146 1147 if (hpd_isr_status & DP_DP_IRQ_HPD_INT_MASK) { 1148 dp_add_event(dp, EV_IRQ_HPD_INT, 0, 0); 1149 } 1150 1151 if (hpd_isr_status & DP_DP_HPD_REPLUG_INT_MASK) { 1152 dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0); 1153 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 3); 1154 } 1155 1156 if (hpd_isr_status & DP_DP_HPD_UNPLUG_INT_MASK) 1157 dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0); 1158 1159 ret = IRQ_HANDLED; 1160 } 1161 1162 /* DP controller isr */ 1163 ret |= dp_ctrl_isr(dp->ctrl); 1164 1165 /* DP aux isr */ 1166 ret |= dp_aux_isr(dp->aux); 1167 1168 return ret; 1169 } 1170 1171 static int dp_display_request_irq(struct dp_display_private *dp) 1172 { 1173 int rc = 0; 1174 struct platform_device *pdev = dp->dp_display.pdev; 1175 1176 dp->irq = platform_get_irq(pdev, 0); 1177 if (dp->irq < 0) { 1178 DRM_ERROR("failed to get irq\n"); 1179 return dp->irq; 1180 } 1181 1182 rc = devm_request_irq(&pdev->dev, dp->irq, dp_display_irq_handler, 1183 IRQF_TRIGGER_HIGH|IRQF_NO_AUTOEN, 1184 "dp_display_isr", dp); 1185 1186 if (rc < 0) { 1187 DRM_ERROR("failed to request IRQ%u: %d\n", 1188 dp->irq, rc); 1189 return rc; 1190 } 1191 1192 return 0; 1193 } 1194 1195 static const struct msm_dp_desc *dp_display_get_desc(struct platform_device *pdev) 1196 { 1197 const struct msm_dp_desc *descs = of_device_get_match_data(&pdev->dev); 1198 struct resource *res; 1199 int i; 1200 1201 res = platform_get_resource(pdev, IORESOURCE_MEM, 0); 1202 if (!res) 1203 return NULL; 1204 1205 for (i = 0; i < descs[i].io_start; i++) { 1206 if (descs[i].io_start == res->start) 1207 return &descs[i]; 1208 } 1209 1210 dev_err(&pdev->dev, "unknown displayport instance\n"); 1211 return NULL; 1212 } 1213 1214 static int dp_display_get_next_bridge(struct msm_dp *dp); 1215 1216 static int dp_display_probe_tail(struct device *dev) 1217 { 1218 struct msm_dp *dp = dev_get_drvdata(dev); 1219 int ret; 1220 1221 ret = dp_display_get_next_bridge(dp); 1222 if (ret) 1223 return ret; 1224 1225 ret = component_add(dev, &dp_display_comp_ops); 1226 if (ret) 1227 DRM_ERROR("component add failed, rc=%d\n", ret); 1228 1229 return ret; 1230 } 1231 1232 static int dp_auxbus_done_probe(struct drm_dp_aux *aux) 1233 { 1234 return dp_display_probe_tail(aux->dev); 1235 } 1236 1237 static int dp_display_probe(struct platform_device *pdev) 1238 { 1239 int rc = 0; 1240 struct dp_display_private *dp; 1241 const struct msm_dp_desc *desc; 1242 1243 if (!pdev || !pdev->dev.of_node) { 1244 DRM_ERROR("pdev not found\n"); 1245 return -ENODEV; 1246 } 1247 1248 dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL); 1249 if (!dp) 1250 return -ENOMEM; 1251 1252 desc = dp_display_get_desc(pdev); 1253 if (!desc) 1254 return -EINVAL; 1255 1256 dp->dp_display.pdev = pdev; 1257 dp->name = "drm_dp"; 1258 dp->id = desc->id; 1259 dp->dp_display.connector_type = desc->connector_type; 1260 dp->wide_bus_en = desc->wide_bus_en; 1261 dp->dp_display.is_edp = 1262 (dp->dp_display.connector_type == DRM_MODE_CONNECTOR_eDP); 1263 1264 rc = dp_init_sub_modules(dp); 1265 if (rc) { 1266 DRM_ERROR("init sub module failed\n"); 1267 return -EPROBE_DEFER; 1268 } 1269 1270 rc = dp->parser->parse(dp->parser); 1271 if (rc) { 1272 DRM_ERROR("device tree parsing failed\n"); 1273 goto err; 1274 } 1275 1276 rc = dp_power_client_init(dp->power); 1277 if (rc) { 1278 DRM_ERROR("Power client create failed\n"); 1279 goto err; 1280 } 1281 1282 /* setup event q */ 1283 mutex_init(&dp->event_mutex); 1284 init_waitqueue_head(&dp->event_q); 1285 spin_lock_init(&dp->event_lock); 1286 1287 /* Store DP audio handle inside DP display */ 1288 dp->dp_display.dp_audio = dp->audio; 1289 1290 init_completion(&dp->audio_comp); 1291 1292 platform_set_drvdata(pdev, &dp->dp_display); 1293 1294 rc = devm_pm_runtime_enable(&pdev->dev); 1295 if (rc) 1296 goto err; 1297 1298 rc = dp_display_request_irq(dp); 1299 if (rc) 1300 goto err; 1301 1302 if (dp->dp_display.is_edp) { 1303 rc = devm_of_dp_aux_populate_bus(dp->aux, dp_auxbus_done_probe); 1304 if (rc) { 1305 DRM_ERROR("eDP auxbus population failed, rc=%d\n", rc); 1306 goto err; 1307 } 1308 } else { 1309 rc = dp_display_probe_tail(&pdev->dev); 1310 if (rc) 1311 goto err; 1312 } 1313 1314 return rc; 1315 1316 err: 1317 dp_display_deinit_sub_modules(dp); 1318 return rc; 1319 } 1320 1321 static void dp_display_remove(struct platform_device *pdev) 1322 { 1323 struct dp_display_private *dp = dev_get_dp_display_private(&pdev->dev); 1324 1325 component_del(&pdev->dev, &dp_display_comp_ops); 1326 dp_display_deinit_sub_modules(dp); 1327 platform_set_drvdata(pdev, NULL); 1328 } 1329 1330 static int dp_pm_runtime_suspend(struct device *dev) 1331 { 1332 struct dp_display_private *dp = dev_get_dp_display_private(dev); 1333 1334 disable_irq(dp->irq); 1335 1336 if (dp->dp_display.is_edp) { 1337 dp_display_host_phy_exit(dp); 1338 dp_catalog_ctrl_hpd_disable(dp->catalog); 1339 } 1340 dp_display_host_deinit(dp); 1341 1342 return 0; 1343 } 1344 1345 static int dp_pm_runtime_resume(struct device *dev) 1346 { 1347 struct dp_display_private *dp = dev_get_dp_display_private(dev); 1348 1349 /* 1350 * for eDP, host cotroller, HPD block and PHY are enabled here 1351 * but with HPD irq disabled 1352 * 1353 * for DP, only host controller is enabled here. 1354 * HPD block is enabled at dp_bridge_hpd_enable() 1355 * PHY will be enabled at plugin handler later 1356 */ 1357 dp_display_host_init(dp); 1358 if (dp->dp_display.is_edp) { 1359 dp_catalog_ctrl_hpd_enable(dp->catalog); 1360 dp_display_host_phy_init(dp); 1361 } 1362 1363 enable_irq(dp->irq); 1364 return 0; 1365 } 1366 1367 static const struct dev_pm_ops dp_pm_ops = { 1368 SET_RUNTIME_PM_OPS(dp_pm_runtime_suspend, dp_pm_runtime_resume, NULL) 1369 SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend, 1370 pm_runtime_force_resume) 1371 }; 1372 1373 static struct platform_driver dp_display_driver = { 1374 .probe = dp_display_probe, 1375 .remove_new = dp_display_remove, 1376 .driver = { 1377 .name = "msm-dp-display", 1378 .of_match_table = dp_dt_match, 1379 .suppress_bind_attrs = true, 1380 .pm = &dp_pm_ops, 1381 }, 1382 }; 1383 1384 int __init msm_dp_register(void) 1385 { 1386 int ret; 1387 1388 ret = platform_driver_register(&dp_display_driver); 1389 if (ret) 1390 DRM_ERROR("Dp display driver register failed"); 1391 1392 return ret; 1393 } 1394 1395 void __exit msm_dp_unregister(void) 1396 { 1397 platform_driver_unregister(&dp_display_driver); 1398 } 1399 1400 bool msm_dp_wide_bus_available(const struct msm_dp *dp_display) 1401 { 1402 struct dp_display_private *dp; 1403 1404 dp = container_of(dp_display, struct dp_display_private, dp_display); 1405 1406 return dp->wide_bus_en; 1407 } 1408 1409 void dp_display_debugfs_init(struct msm_dp *dp_display, struct dentry *root, bool is_edp) 1410 { 1411 struct dp_display_private *dp; 1412 struct device *dev; 1413 int rc; 1414 1415 dp = container_of(dp_display, struct dp_display_private, dp_display); 1416 dev = &dp->dp_display.pdev->dev; 1417 1418 dp->debug = dp_debug_get(dev, dp->panel, 1419 dp->link, dp->dp_display.connector, 1420 root, is_edp); 1421 if (IS_ERR(dp->debug)) { 1422 rc = PTR_ERR(dp->debug); 1423 DRM_ERROR("failed to initialize debug, rc = %d\n", rc); 1424 dp->debug = NULL; 1425 } 1426 } 1427 1428 static int dp_display_get_next_bridge(struct msm_dp *dp) 1429 { 1430 int rc; 1431 struct dp_display_private *dp_priv; 1432 1433 dp_priv = container_of(dp, struct dp_display_private, dp_display); 1434 1435 /* 1436 * External bridges are mandatory for eDP interfaces: one has to 1437 * provide at least an eDP panel (which gets wrapped into panel-bridge). 1438 * 1439 * For DisplayPort interfaces external bridges are optional, so 1440 * silently ignore an error if one is not present (-ENODEV). 1441 */ 1442 rc = devm_dp_parser_find_next_bridge(&dp->pdev->dev, dp_priv->parser); 1443 if (!dp->is_edp && rc == -ENODEV) 1444 return 0; 1445 1446 if (!rc) 1447 dp->next_bridge = dp_priv->parser->next_bridge; 1448 1449 return rc; 1450 } 1451 1452 int msm_dp_modeset_init(struct msm_dp *dp_display, struct drm_device *dev, 1453 struct drm_encoder *encoder) 1454 { 1455 struct dp_display_private *dp_priv; 1456 int ret; 1457 1458 dp_display->drm_dev = dev; 1459 1460 dp_priv = container_of(dp_display, struct dp_display_private, dp_display); 1461 1462 ret = dp_bridge_init(dp_display, dev, encoder); 1463 if (ret) { 1464 DRM_DEV_ERROR(dev->dev, 1465 "failed to create dp bridge: %d\n", ret); 1466 return ret; 1467 } 1468 1469 dp_display->connector = dp_drm_connector_init(dp_display, encoder); 1470 if (IS_ERR(dp_display->connector)) { 1471 ret = PTR_ERR(dp_display->connector); 1472 DRM_DEV_ERROR(dev->dev, 1473 "failed to create dp connector: %d\n", ret); 1474 dp_display->connector = NULL; 1475 return ret; 1476 } 1477 1478 dp_priv->panel->connector = dp_display->connector; 1479 1480 return 0; 1481 } 1482 1483 void dp_bridge_atomic_enable(struct drm_bridge *drm_bridge, 1484 struct drm_bridge_state *old_bridge_state) 1485 { 1486 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); 1487 struct msm_dp *dp = dp_bridge->dp_display; 1488 int rc = 0; 1489 struct dp_display_private *dp_display; 1490 u32 state; 1491 bool force_link_train = false; 1492 1493 dp_display = container_of(dp, struct dp_display_private, dp_display); 1494 if (!dp_display->dp_mode.drm_mode.clock) { 1495 DRM_ERROR("invalid params\n"); 1496 return; 1497 } 1498 1499 if (dp->is_edp) 1500 dp_hpd_plug_handle(dp_display, 0); 1501 1502 mutex_lock(&dp_display->event_mutex); 1503 if (pm_runtime_resume_and_get(&dp->pdev->dev)) { 1504 DRM_ERROR("failed to pm_runtime_resume\n"); 1505 mutex_unlock(&dp_display->event_mutex); 1506 return; 1507 } 1508 1509 state = dp_display->hpd_state; 1510 if (state != ST_DISPLAY_OFF && state != ST_MAINLINK_READY) { 1511 mutex_unlock(&dp_display->event_mutex); 1512 return; 1513 } 1514 1515 rc = dp_display_set_mode(dp, &dp_display->dp_mode); 1516 if (rc) { 1517 DRM_ERROR("Failed to perform a mode set, rc=%d\n", rc); 1518 mutex_unlock(&dp_display->event_mutex); 1519 return; 1520 } 1521 1522 state = dp_display->hpd_state; 1523 1524 if (state == ST_DISPLAY_OFF) { 1525 dp_display_host_phy_init(dp_display); 1526 force_link_train = true; 1527 } 1528 1529 dp_display_enable(dp_display, force_link_train); 1530 1531 rc = dp_display_post_enable(dp); 1532 if (rc) { 1533 DRM_ERROR("DP display post enable failed, rc=%d\n", rc); 1534 dp_display_disable(dp_display); 1535 } 1536 1537 /* completed connection */ 1538 dp_display->hpd_state = ST_CONNECTED; 1539 1540 drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type); 1541 mutex_unlock(&dp_display->event_mutex); 1542 } 1543 1544 void dp_bridge_atomic_disable(struct drm_bridge *drm_bridge, 1545 struct drm_bridge_state *old_bridge_state) 1546 { 1547 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); 1548 struct msm_dp *dp = dp_bridge->dp_display; 1549 struct dp_display_private *dp_display; 1550 1551 dp_display = container_of(dp, struct dp_display_private, dp_display); 1552 1553 dp_ctrl_push_idle(dp_display->ctrl); 1554 } 1555 1556 void dp_bridge_atomic_post_disable(struct drm_bridge *drm_bridge, 1557 struct drm_bridge_state *old_bridge_state) 1558 { 1559 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); 1560 struct msm_dp *dp = dp_bridge->dp_display; 1561 u32 state; 1562 struct dp_display_private *dp_display; 1563 1564 dp_display = container_of(dp, struct dp_display_private, dp_display); 1565 1566 if (dp->is_edp) 1567 dp_hpd_unplug_handle(dp_display, 0); 1568 1569 mutex_lock(&dp_display->event_mutex); 1570 1571 state = dp_display->hpd_state; 1572 if (state != ST_DISCONNECT_PENDING && state != ST_CONNECTED) 1573 drm_dbg_dp(dp->drm_dev, "type=%d wrong hpd_state=%d\n", 1574 dp->connector_type, state); 1575 1576 dp_display_disable(dp_display); 1577 1578 state = dp_display->hpd_state; 1579 if (state == ST_DISCONNECT_PENDING) { 1580 /* completed disconnection */ 1581 dp_display->hpd_state = ST_DISCONNECTED; 1582 } else { 1583 dp_display->hpd_state = ST_DISPLAY_OFF; 1584 } 1585 1586 drm_dbg_dp(dp->drm_dev, "type=%d Done\n", dp->connector_type); 1587 1588 pm_runtime_put_sync(&dp->pdev->dev); 1589 mutex_unlock(&dp_display->event_mutex); 1590 } 1591 1592 void dp_bridge_mode_set(struct drm_bridge *drm_bridge, 1593 const struct drm_display_mode *mode, 1594 const struct drm_display_mode *adjusted_mode) 1595 { 1596 struct msm_dp_bridge *dp_bridge = to_dp_bridge(drm_bridge); 1597 struct msm_dp *dp = dp_bridge->dp_display; 1598 struct dp_display_private *dp_display; 1599 1600 dp_display = container_of(dp, struct dp_display_private, dp_display); 1601 1602 memset(&dp_display->dp_mode, 0x0, sizeof(struct dp_display_mode)); 1603 1604 if (dp_display_check_video_test(dp)) 1605 dp_display->dp_mode.bpp = dp_display_get_test_bpp(dp); 1606 else /* Default num_components per pixel = 3 */ 1607 dp_display->dp_mode.bpp = dp->connector->display_info.bpc * 3; 1608 1609 if (!dp_display->dp_mode.bpp) 1610 dp_display->dp_mode.bpp = 24; /* Default bpp */ 1611 1612 drm_mode_copy(&dp_display->dp_mode.drm_mode, adjusted_mode); 1613 1614 dp_display->dp_mode.v_active_low = 1615 !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NVSYNC); 1616 1617 dp_display->dp_mode.h_active_low = 1618 !!(dp_display->dp_mode.drm_mode.flags & DRM_MODE_FLAG_NHSYNC); 1619 } 1620 1621 void dp_bridge_hpd_enable(struct drm_bridge *bridge) 1622 { 1623 struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge); 1624 struct msm_dp *dp_display = dp_bridge->dp_display; 1625 struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display); 1626 1627 /* 1628 * this is for external DP with hpd irq enabled case, 1629 * step-1: dp_pm_runtime_resume() enable dp host only 1630 * step-2: enable hdp block and have hpd irq enabled here 1631 * step-3: waiting for plugin irq while phy is not initialized 1632 * step-4: DP PHY is initialized at plugin handler before link training 1633 * 1634 */ 1635 mutex_lock(&dp->event_mutex); 1636 if (pm_runtime_resume_and_get(&dp_display->pdev->dev)) { 1637 DRM_ERROR("failed to resume power\n"); 1638 mutex_unlock(&dp->event_mutex); 1639 return; 1640 } 1641 1642 dp_catalog_ctrl_hpd_enable(dp->catalog); 1643 1644 /* enable HDP interrupts */ 1645 dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, true); 1646 1647 dp_display->internal_hpd = true; 1648 mutex_unlock(&dp->event_mutex); 1649 } 1650 1651 void dp_bridge_hpd_disable(struct drm_bridge *bridge) 1652 { 1653 struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge); 1654 struct msm_dp *dp_display = dp_bridge->dp_display; 1655 struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display); 1656 1657 mutex_lock(&dp->event_mutex); 1658 /* disable HDP interrupts */ 1659 dp_catalog_hpd_config_intr(dp->catalog, DP_DP_HPD_INT_MASK, false); 1660 dp_catalog_ctrl_hpd_disable(dp->catalog); 1661 1662 dp_display->internal_hpd = false; 1663 1664 pm_runtime_put_sync(&dp_display->pdev->dev); 1665 mutex_unlock(&dp->event_mutex); 1666 } 1667 1668 void dp_bridge_hpd_notify(struct drm_bridge *bridge, 1669 enum drm_connector_status status) 1670 { 1671 struct msm_dp_bridge *dp_bridge = to_dp_bridge(bridge); 1672 struct msm_dp *dp_display = dp_bridge->dp_display; 1673 struct dp_display_private *dp = container_of(dp_display, struct dp_display_private, dp_display); 1674 1675 /* Without next_bridge interrupts are handled by the DP core directly */ 1676 if (dp_display->internal_hpd) 1677 return; 1678 1679 if (!dp_display->link_ready && status == connector_status_connected) 1680 dp_add_event(dp, EV_HPD_PLUG_INT, 0, 0); 1681 else if (dp_display->link_ready && status == connector_status_disconnected) 1682 dp_add_event(dp, EV_HPD_UNPLUG_INT, 0, 0); 1683 } 1684