// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 * DisplayPort support for G4x,ILK,SNB,IVB,VLV,CHV (HSW+ handled by the DDI code).
 */

#include <linux/string_helpers.h>

#include "g4x_dp.h"
#include "i915_reg.h"
#include "intel_audio.h"
#include "intel_backlight.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_de.h"
#include "intel_display_power.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_aux.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_encoder.h"
#include "intel_fifo_underrun.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_pch_display.h"
#include "intel_pps.h"
#include "vlv_sideband.h"

static const struct dpll g4x_dpll[] = {
	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 2, .m1 = 23, .m2 = 8, },
	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 1, .m1 = 14, .m2 = 2, },
};

static const struct dpll pch_dpll[] = {
	{ .dot = 162000, .p1 = 2, .p2 = 10, .n = 1, .m1 = 12, .m2 = 9, },
	{ .dot = 270000, .p1 = 1, .p2 = 10, .n = 2, .m1 = 14, .m2 = 8, },
};

static const struct dpll vlv_dpll[] = {
	{ .dot = 162000, .p1 = 3, .p2 = 2, .n = 5, .m1 = 3, .m2 = 81, },
	{ .dot = 270000, .p1 = 2, .p2 = 2, .n = 1, .m1 = 2, .m2 = 27, },
};

static const struct dpll chv_dpll[] = {
	/* m2 is .22 binary fixed point */
	{ .dot = 162000, .p1 = 4, .p2 = 2, .n = 1, .m1 = 2, .m2 = 0x819999a /* 32.4 */ },
	{ .dot = 270000, .p1 = 4, .p2 = 1, .n = 1, .m1 = 2, .m2 = 0x6c00000 /* 27.0 */ },
};

const struct dpll *vlv_get_dpll(struct drm_i915_private *i915)
{
	return IS_CHERRYVIEW(i915) ? &chv_dpll[0] : &vlv_dpll[0];
}

void g4x_dp_set_clock(struct intel_encoder *encoder,
		      struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct dpll *divisor = NULL;
	int i, count = 0;

	if (IS_G4X(dev_priv)) {
		divisor = g4x_dpll;
		count = ARRAY_SIZE(g4x_dpll);
	} else if (HAS_PCH_SPLIT(dev_priv)) {
		divisor = pch_dpll;
		count = ARRAY_SIZE(pch_dpll);
	} else if (IS_CHERRYVIEW(dev_priv)) {
		divisor = chv_dpll;
		count = ARRAY_SIZE(chv_dpll);
	} else if (IS_VALLEYVIEW(dev_priv)) {
		divisor = vlv_dpll;
		count = ARRAY_SIZE(vlv_dpll);
	}

	if (divisor && count) {
		for (i = 0; i < count; i++) {
			if (pipe_config->port_clock == divisor[i].dot) {
				pipe_config->dpll = divisor[i];
				pipe_config->clock_set = true;
				break;
			}
		}
	}
}

static void intel_dp_prepare(struct intel_encoder *encoder,
			     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;

	intel_dp_set_link_params(intel_dp,
				 pipe_config->port_clock,
				 pipe_config->lane_count);

	/*
	 * There are four kinds of DP registers:
	 *	IBX PCH
	 *	SNB CPU
	 *	IVB CPU
	 *	CPT PCH
	 *
	 * IBX PCH and CPU are the same for almost everything,
	 * except that the CPU DP PLL is configured in this
	 * register
	 *
	 * CPT PCH is quite different, having many bits moved
	 * to the TRANS_DP_CTL register instead.
	 * That configuration happens (oddly) in ilk_pch_enable
	 */

	/* Preserve the BIOS-computed detected bit. This is
	 * supposed to be read-only.
	 */
	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg) & DP_DETECTED;

	/* Handle DP bits in common between all three register formats */
	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
	intel_dp->DP |= DP_PORT_WIDTH(pipe_config->lane_count);

	/* Split out the IBX/CPU vs CPT settings */

	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A) {
		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		intel_dp->DP |= DP_PIPE_SEL_IVB(crtc->pipe);
	} else if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;

		intel_de_rmw(dev_priv, TRANS_DP_CTL(crtc->pipe),
			     TRANS_DP_ENH_FRAMING,
			     pipe_config->enhanced_framing ?
			     TRANS_DP_ENH_FRAMING : 0);
	} else {
		if (IS_G4X(dev_priv) && pipe_config->limited_color_range)
			intel_dp->DP |= DP_COLOR_RANGE_16_235;

		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
			intel_dp->DP |= DP_SYNC_HS_HIGH;
		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
			intel_dp->DP |= DP_SYNC_VS_HIGH;
		intel_dp->DP |= DP_LINK_TRAIN_OFF;

		if (pipe_config->enhanced_framing)
			intel_dp->DP |= DP_ENHANCED_FRAMING;

		if (IS_CHERRYVIEW(dev_priv))
			intel_dp->DP |= DP_PIPE_SEL_CHV(crtc->pipe);
		else
			intel_dp->DP |= DP_PIPE_SEL(crtc->pipe);
	}
}

static void assert_dp_port(struct intel_dp *intel_dp, bool state)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	bool cur_state = intel_de_read(dev_priv, intel_dp->output_reg) & DP_PORT_EN;

	I915_STATE_WARN(dev_priv, cur_state != state,
			"[ENCODER:%d:%s] state assertion failure (expected %s, current %s)\n",
			dig_port->base.base.base.id, dig_port->base.base.name,
			str_on_off(state), str_on_off(cur_state));
}
#define assert_dp_port_disabled(d) assert_dp_port((d), false)

static void assert_edp_pll(struct drm_i915_private *dev_priv, bool state)
{
	bool cur_state = intel_de_read(dev_priv, DP_A) & DP_PLL_ENABLE;

	I915_STATE_WARN(dev_priv, cur_state != state,
			"eDP PLL state assertion failure (expected %s, current %s)\n",
			str_on_off(state), str_on_off(cur_state));
}
#define assert_edp_pll_enabled(d) assert_edp_pll((d), true)
#define assert_edp_pll_disabled(d) assert_edp_pll((d), false)

static void ilk_edp_pll_on(struct intel_dp *intel_dp,
			   const struct intel_crtc_state *pipe_config)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, pipe_config->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_disabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "enabling eDP PLL for clock %d\n",
		    pipe_config->port_clock);

	intel_dp->DP &= ~DP_PLL_FREQ_MASK;

	if (pipe_config->port_clock == 162000)
		intel_dp->DP |= DP_PLL_FREQ_162MHZ;
	else
		intel_dp->DP |= DP_PLL_FREQ_270MHZ;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
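	/*
	 * Flush the frequency select write and give it time to
	 * settle before the PLL itself is enabled further below.
	 */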
	intel_de_posting_read(dev_priv, DP_A);
	udelay(500);

	/*
	 * [DevILK] Work around required when enabling DP PLL
	 * while a pipe is enabled going to FDI:
	 * 1. Wait for the start of vertical blank on the enabled pipe going to FDI
	 * 2. Program DP PLL enable
	 */
	if (IS_IRONLAKE(dev_priv))
		intel_wait_for_vblank_if_active(dev_priv, !crtc->pipe);

	intel_dp->DP |= DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static void ilk_edp_pll_off(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);

	assert_transcoder_disabled(dev_priv, old_crtc_state->cpu_transcoder);
	assert_dp_port_disabled(intel_dp);
	assert_edp_pll_enabled(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "disabling eDP PLL\n");

	intel_dp->DP &= ~DP_PLL_ENABLE;

	intel_de_write(dev_priv, DP_A, intel_dp->DP);
	intel_de_posting_read(dev_priv, DP_A);
	udelay(200);
}

static bool cpt_dp_port_selected(struct drm_i915_private *dev_priv,
				 enum port port, enum pipe *pipe)
{
	enum pipe p;

	for_each_pipe(dev_priv, p) {
		u32 val = intel_de_read(dev_priv, TRANS_DP_CTL(p));

		if ((val & TRANS_DP_PORT_SEL_MASK) == TRANS_DP_PORT_SEL(port)) {
			*pipe = p;
			return true;
		}
	}

	drm_dbg_kms(&dev_priv->drm, "No pipe for DP port %c found\n",
		    port_name(port));

	/* must initialize pipe to something for the asserts */
	*pipe = PIPE_A;

	return false;
}

bool g4x_dp_port_enabled(struct drm_i915_private *dev_priv,
			 i915_reg_t dp_reg, enum port port,
			 enum pipe *pipe)
{
	bool ret;
	u32 val;

	val = intel_de_read(dev_priv, dp_reg);

	ret = val & DP_PORT_EN;

	/* asserts want to know the pipe even if the port is disabled */
	if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		*pipe = (val & DP_PIPE_SEL_MASK_IVB) >> DP_PIPE_SEL_SHIFT_IVB;
	else if (HAS_PCH_CPT(dev_priv) && port != PORT_A)
		ret &= cpt_dp_port_selected(dev_priv, port, pipe);
	else if (IS_CHERRYVIEW(dev_priv))
		*pipe = (val & DP_PIPE_SEL_MASK_CHV) >> DP_PIPE_SEL_SHIFT_CHV;
	else
		*pipe = (val & DP_PIPE_SEL_MASK) >> DP_PIPE_SEL_SHIFT;

	return ret;
}

static bool intel_dp_get_hw_state(struct intel_encoder *encoder,
				  enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	intel_wakeref_t wakeref;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	ret = g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				  encoder->port, pipe);

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void g4x_dp_get_m_n(struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);

	if (crtc_state->has_pch_encoder) {
		intel_pch_transcoder_get_m1_n1(crtc, &crtc_state->dp_m_n);
		intel_pch_transcoder_get_m2_n2(crtc, &crtc_state->dp_m2_n2);
	} else {
		intel_cpu_transcoder_get_m1_n1(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m_n);
		intel_cpu_transcoder_get_m2_n2(crtc, crtc_state->cpu_transcoder,
					       &crtc_state->dp_m2_n2);
	}
}

static void intel_dp_get_config(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 tmp, flags = 0;
	enum port port = encoder->port;
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);

	if (encoder->type == INTEL_OUTPUT_EDP)
		pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
	else
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);

	tmp = intel_de_read(dev_priv, intel_dp->output_reg);

	pipe_config->has_audio = tmp & DP_AUDIO_OUTPUT_ENABLE && port != PORT_A;

	if (HAS_PCH_CPT(dev_priv) && port != PORT_A) {
		u32 trans_dp = intel_de_read(dev_priv,
					     TRANS_DP_CTL(crtc->pipe));

		if (trans_dp & TRANS_DP_ENH_FRAMING)
			pipe_config->enhanced_framing = true;

		if (trans_dp & TRANS_DP_HSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (trans_dp & TRANS_DP_VSYNC_ACTIVE_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	} else {
		if (tmp & DP_ENHANCED_FRAMING)
			pipe_config->enhanced_framing = true;

		if (tmp & DP_SYNC_HS_HIGH)
			flags |= DRM_MODE_FLAG_PHSYNC;
		else
			flags |= DRM_MODE_FLAG_NHSYNC;

		if (tmp & DP_SYNC_VS_HIGH)
			flags |= DRM_MODE_FLAG_PVSYNC;
		else
			flags |= DRM_MODE_FLAG_NVSYNC;
	}

	pipe_config->hw.adjusted_mode.flags |= flags;

	if (IS_G4X(dev_priv) && tmp & DP_COLOR_RANGE_16_235)
		pipe_config->limited_color_range = true;

	pipe_config->lane_count =
		((tmp & DP_PORT_WIDTH_MASK) >> DP_PORT_WIDTH_SHIFT) + 1;

	g4x_dp_get_m_n(pipe_config);

	if (port == PORT_A) {
		if ((intel_de_read(dev_priv, DP_A) & DP_PLL_FREQ_MASK) == DP_PLL_FREQ_162MHZ)
			pipe_config->port_clock = 162000;
		else
			pipe_config->port_clock = 270000;
	}

	pipe_config->hw.adjusted_mode.crtc_clock =
		intel_dotclock_calculate(pipe_config->port_clock,
					 &pipe_config->dp_m_n);

	if (intel_dp_is_edp(intel_dp))
		intel_edp_fixup_vbt_bpp(encoder, pipe_config->pipe_bpp);

	intel_audio_codec_get_config(encoder, pipe_config);
}

static void
intel_dp_link_down(struct intel_encoder *encoder,
		   const struct intel_crtc_state *old_crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum port port = encoder->port;

	if (drm_WARN_ON(&dev_priv->drm,
			(intel_de_read(dev_priv, intel_dp->output_reg) &
			 DP_PORT_EN) == 0))
		return;

	drm_dbg_kms(&dev_priv->drm, "\n");

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A)) {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE_CPT;
	} else {
		intel_dp->DP &= ~DP_LINK_TRAIN_MASK;
		intel_dp->DP |= DP_LINK_TRAIN_PAT_IDLE;
	}
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	intel_dp->DP &= ~DP_PORT_EN;
	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);

	/*
	 * HW workaround for IBX, we need to move the port
	 * to transcoder A after disabling it to allow
	 * the matching HDMI port to be enabled on transcoder A.
	 */
	if (HAS_PCH_IBX(dev_priv) && crtc->pipe == PIPE_B && port != PORT_A) {
		/*
		 * We get CPU/PCH FIFO underruns on the other pipe when
		 * doing the workaround. Sweep them under the rug.
		 */
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, false);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, false);

		/* always enable with pattern 1 (as per spec) */
		intel_dp->DP &= ~(DP_PIPE_SEL_MASK | DP_LINK_TRAIN_MASK);
		intel_dp->DP |= DP_PORT_EN | DP_PIPE_SEL(PIPE_A) |
			DP_LINK_TRAIN_PAT_1;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_dp->DP &= ~DP_PORT_EN;
		intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
		intel_de_posting_read(dev_priv, intel_dp->output_reg);

		intel_wait_for_vblank_if_active(dev_priv, PIPE_A);
		intel_set_cpu_fifo_underrun_reporting(dev_priv, PIPE_A, true);
		intel_set_pch_fifo_underrun_reporting(dev_priv, PIPE_A, true);
	}

	msleep(intel_dp->pps.panel_power_down_delay);

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = INVALID_PIPE;
	}
}

static void g4x_dp_audio_enable(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	if (!crtc_state->has_audio)
		return;

	/* Enable audio presence detect */
	intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
	intel_de_write(i915, intel_dp->output_reg, intel_dp->DP);

	intel_audio_codec_enable(encoder, crtc_state, conn_state);
}

static void g4x_dp_audio_disable(struct intel_encoder *encoder,
				 const struct intel_crtc_state *old_crtc_state,
				 const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	if (!old_crtc_state->has_audio)
		return;

	intel_audio_codec_disable(encoder, old_crtc_state, old_conn_state);

	/* Disable audio presence detect */
	intel_dp->DP &= ~DP_AUDIO_OUTPUT_ENABLE;
	intel_de_write(i915, intel_dp->output_reg, intel_dp->DP);
}

static void intel_disable_dp(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *old_crtc_state,
			     const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	/*
	 * Make sure the panel is off before trying to change the mode.
	 * But also ensure that we have vdd while we switch off the panel.
	 */
	intel_pps_vdd_on(intel_dp);
	intel_edp_backlight_off(old_conn_state);
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);
	intel_pps_off(intel_dp);
}

static void g4x_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void vlv_disable_dp(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *old_crtc_state,
			   const struct drm_connector_state *old_conn_state)
{
	intel_disable_dp(state, encoder, old_crtc_state, old_conn_state);
}

static void g4x_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	/*
	 * Bspec does not list a specific disable sequence for g4x DP.
	 * Follow the ilk+ sequence (disable pipe before the port) for
	 * g4x DP as it does not suffer from underruns like the normal
	 * g4x modeset sequence (disable pipe after the port).
	 */
	intel_dp_link_down(encoder, old_crtc_state);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_off(intel_dp, old_crtc_state);
}

static void vlv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	intel_dp_link_down(encoder, old_crtc_state);
}

static void chv_post_disable_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	intel_dp_link_down(encoder, old_crtc_state);

	vlv_dpio_get(dev_priv);

	/* Assert data lane reset */
	chv_data_lane_soft_reset(encoder, old_crtc_state, true);

	vlv_dpio_put(dev_priv);
}

static void
cpt_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK_CPT;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1_CPT;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2_CPT;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void
g4x_set_link_train(struct intel_dp *intel_dp,
		   const struct intel_crtc_state *crtc_state,
		   u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	intel_dp->DP &= ~DP_LINK_TRAIN_MASK;

	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		intel_dp->DP |= DP_LINK_TRAIN_OFF;
		break;
	case DP_TRAINING_PATTERN_1:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_1;
		break;
	case DP_TRAINING_PATTERN_2:
		intel_dp->DP |= DP_LINK_TRAIN_PAT_2;
		break;
	default:
		MISSING_CASE(intel_dp_training_pattern_symbol(dp_train_pat));
		return;
	}

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_dp_enable_port(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);

	/* enable with pattern 1 (as per spec) */

	intel_dp_program_link_training_pattern(intel_dp, crtc_state,
					       DP_PHY_DPRX, DP_TRAINING_PATTERN_1);

	/*
	 * Magic for VLV/CHV. We _must_ first set up the register
	 * without actually enabling the port, and then do another
	 * write to enable the port. Otherwise link training will
	 * fail when the power sequencer is freshly used for this port.
	 */
	intel_dp->DP |= DP_PORT_EN;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

static void intel_enable_dp(struct intel_atomic_state *state,
			    struct intel_encoder *encoder,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 dp_reg = intel_de_read(dev_priv, intel_dp->output_reg);
	intel_wakeref_t wakeref;

	if (drm_WARN_ON(&dev_priv->drm, dp_reg & DP_PORT_EN))
		return;

	with_intel_pps_lock(intel_dp, wakeref) {
		if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
			vlv_pps_init(encoder, pipe_config);

		intel_dp_enable_port(intel_dp, pipe_config);

		intel_pps_vdd_on_unlocked(intel_dp);
		intel_pps_on_unlocked(intel_dp);
		intel_pps_vdd_off_unlocked(intel_dp, true);
	}

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		unsigned int lane_mask = 0x0;

		if (IS_CHERRYVIEW(dev_priv))
			lane_mask = intel_dp_unused_lane_mask(pipe_config->lane_count);

		vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
				    lane_mask);
	}

	intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, pipe_config);
	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, pipe_config);
	intel_dp_start_link_train(state, intel_dp, pipe_config);
	intel_dp_stop_link_train(intel_dp, pipe_config);
}

static void g4x_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_enable_dp(state, encoder, pipe_config, conn_state);
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void vlv_enable_dp(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  const struct intel_crtc_state *pipe_config,
			  const struct drm_connector_state *conn_state)
{
	intel_edp_backlight_on(pipe_config, conn_state);
}

static void g4x_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	intel_dp_prepare(encoder,
			 pipe_config);

	/* Only ilk+ has port A */
	if (port == PORT_A)
		ilk_edp_pll_on(intel_dp, pipe_config);
}

static void vlv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	vlv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);
}

static void vlv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	vlv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_pre_enable_dp(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	chv_phy_pre_encoder_enable(encoder, pipe_config);

	intel_enable_dp(state, encoder, pipe_config, conn_state);

	/* Second common lane will stay alive on its own now */
	chv_phy_release_cl2_override(encoder);
}

static void chv_dp_pre_pll_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	intel_dp_prepare(encoder, pipe_config);

	chv_phy_pre_pll_enable(encoder, pipe_config);
}

static void chv_dp_post_pll_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	chv_phy_post_pll_disable(encoder, old_crtc_state);
}

static u8 intel_dp_voltage_max_2(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_2;
}

static u8 intel_dp_voltage_max_3(struct intel_dp *intel_dp,
				 const struct intel_crtc_state *crtc_state)
{
	return DP_TRAIN_VOLTAGE_SWING_LEVEL_3;
}

static u8 intel_dp_preemph_max_2(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_2;
}

static u8 intel_dp_preemph_max_3(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void vlv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	unsigned long demph_reg_value, preemph_reg_value,
		uniqtranscale_reg_value;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		preemph_reg_value = 0x0004000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x552AB83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5548B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B245555;
			uniqtranscale_reg_value = 0x5560B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			demph_reg_value = 0x2B405555;
			uniqtranscale_reg_value = 0x5598DA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		preemph_reg_value = 0x0002000;
		switch (train_set &
			DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x5552B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B404848;
			uniqtranscale_reg_value = 0x5580B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			demph_reg_value = 0x2B404040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		preemph_reg_value = 0x0000000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x2B305555;
			uniqtranscale_reg_value = 0x5570B83A;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			demph_reg_value = 0x2B2B4040;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		preemph_reg_value = 0x0006000;
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			demph_reg_value = 0x1B405555;
			uniqtranscale_reg_value = 0x55ADDA3A;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	vlv_set_phy_signal_level(encoder, crtc_state,
				 demph_reg_value, preemph_reg_value,
				 uniqtranscale_reg_value, 0);
}

static void chv_set_signal_levels(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 deemph_reg_value, margin_reg_value;
	bool uniq_trans_scale = false;
	u8 train_set = intel_dp->train_set[0];

	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 128;
			margin_reg_value = 52;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 128;
			margin_reg_value = 77;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 128;
			margin_reg_value = 102;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
			deemph_reg_value = 128;
			margin_reg_value = 154;
			uniq_trans_scale = true;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 85;
			margin_reg_value = 78;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 85;
			margin_reg_value = 116;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
			deemph_reg_value = 85;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 64;
			margin_reg_value = 104;
			break;
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
			deemph_reg_value = 64;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
			deemph_reg_value = 43;
			margin_reg_value = 154;
			break;
		default:
			return;
		}
		break;
	default:
		return;
	}

	chv_set_phy_signal_level(encoder, crtc_state,
				 deemph_reg_value, margin_reg_value,
				 uniq_trans_scale);
}

static u32 g4x_signal_levels(u8 train_set)
{
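	/*
	 * Translate the requested DPCD voltage swing and pre-emphasis
	 * levels into the g4x/ibx DP port register encoding.
	 */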
	u32 signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPH_LEVEL_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_1:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_2:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPH_LEVEL_3:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}

static void
g4x_set_signal_levels(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = g4x_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~(DP_VOLTAGE_MASK | DP_PRE_EMPHASIS_MASK);
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* SNB CPU eDP voltage swing and pre-emphasis control */
static u32 snb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}

static void
snb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = snb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg,
		       intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/* IVB CPU eDP voltage swing and pre-emphasis control */
static u32 ivb_cpu_edp_signal_levels(u8 train_set)
{
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2:
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		MISSING_CASE(signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}

static void
ivb_cpu_edp_set_signal_levels(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 train_set = intel_dp->train_set[0];
	u32 signal_levels;

	signal_levels = ivb_cpu_edp_signal_levels(train_set);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB;
	intel_dp->DP |= signal_levels;

	intel_de_write(dev_priv, intel_dp->output_reg, intel_dp->DP);
	intel_de_posting_read(dev_priv, intel_dp->output_reg);
}

/*
 * If the display is now connected, check the link status;
 * there have been known issues of link loss triggering
 * a long pulse.
 *
 * Some sinks (eg. ASUS PB287Q) seem to perform some
 * weird HPD ping pong during modesets. So we can apparently
 * end up with HPD going low during a modeset, and then
 * going back up soon after. And once that happens we must
 * retrain the link to get a picture. That's in case no
 * userspace component reacted to the intermittent HPD dip.
 */
static enum intel_hotplug_state
intel_dp_hotplug(struct intel_encoder *encoder,
		 struct intel_connector *connector)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum intel_hotplug_state state;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	intel_dp_check_link_state(intel_dp);

	/*
	 * Keeping it consistent with intel_ddi_hotplug() and
	 * intel_hdmi_hotplug().
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED && !connector->hotplug_retries)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool ibx_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->display.hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool g4x_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit;

	switch (encoder->hpd_pin) {
	case HPD_PORT_B:
		bit = PORTB_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_C:
		bit = PORTC_HOTPLUG_LIVE_STATUS_G4X;
		break;
	case HPD_PORT_D:
		bit = PORTD_HOTPLUG_LIVE_STATUS_G4X;
		break;
	default:
		MISSING_CASE(encoder->hpd_pin);
		return false;
	}

	return intel_de_read(dev_priv, PORT_HOTPLUG_STAT(dev_priv)) & bit;
}

static bool ilk_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->display.hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static void g4x_dp_suspend_complete(struct intel_encoder *encoder)
{
	/*
	 * TODO: Move this to intel_dp_encoder_suspend(),
	 * once modeset locking around that is removed.
	 */
	intel_encoder_link_check_flush_work(encoder);
}

static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(enc_to_dig_port(to_intel_encoder(encoder)));
}

enum pipe vlv_active_pipe(struct intel_dp *intel_dp)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum pipe pipe;

	if (g4x_dp_port_enabled(dev_priv, intel_dp->output_reg,
				encoder->port, &pipe))
		return pipe;

	return INVALID_PIPE;
}

static void intel_dp_encoder_reset(struct drm_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->DP = intel_de_read(dev_priv, intel_dp->output_reg);

	intel_dp->reset_link_params = true;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) {
		intel_wakeref_t wakeref;

		with_intel_pps_lock(intel_dp, wakeref)
			intel_dp->pps.active_pipe = vlv_active_pipe(intel_dp);
	}

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_dp_encoder_destroy,
};

bool g4x_dp_init(struct drm_i915_private *dev_priv,
		 i915_reg_t output_reg, enum port port)
{
	const struct intel_bios_encoder_data *devdata;
	struct intel_digital_port *dig_port;
	struct intel_encoder *intel_encoder;
	struct drm_encoder *encoder;
	struct intel_connector *intel_connector;

	if (!assert_port_valid(dev_priv, port))
		return false;

	devdata = intel_bios_encoder_data_lookup(dev_priv, port);

	/* FIXME bail? */
	if (!devdata)
		drm_dbg_kms(&dev_priv->drm, "No VBT child device for DP-%c\n",
			    port_name(port));

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return false;

	dig_port->aux_ch = AUX_CH_NONE;

	intel_connector = intel_connector_alloc();
	if (!intel_connector)
		goto err_connector_alloc;

	intel_encoder = &dig_port->base;
	encoder = &intel_encoder->base;

	intel_encoder->devdata = devdata;

	mutex_init(&dig_port->hdcp_mutex);

	if (drm_encoder_init(&dev_priv->drm, &intel_encoder->base,
			     &intel_dp_enc_funcs, DRM_MODE_ENCODER_TMDS,
			     "DP %c", port_name(port)))
		goto err_encoder_init;

	intel_encoder_link_check_init(intel_encoder, intel_dp_link_check);

	intel_encoder->hotplug = intel_dp_hotplug;
	intel_encoder->compute_config = intel_dp_compute_config;
	intel_encoder->get_hw_state = intel_dp_get_hw_state;
	intel_encoder->get_config = intel_dp_get_config;
	intel_encoder->sync_state = intel_dp_sync_state;
	intel_encoder->initial_fastset_check = intel_dp_initial_fastset_check;
	intel_encoder->update_pipe = intel_backlight_update;
	intel_encoder->suspend = intel_dp_encoder_suspend;
	intel_encoder->suspend_complete = g4x_dp_suspend_complete;
	intel_encoder->shutdown = intel_dp_encoder_shutdown;
	if (IS_CHERRYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = chv_dp_pre_pll_enable;
		intel_encoder->pre_enable = chv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = chv_post_disable_dp;
		intel_encoder->post_pll_disable = chv_dp_post_pll_disable;
	} else if (IS_VALLEYVIEW(dev_priv)) {
		intel_encoder->pre_pll_enable = vlv_dp_pre_pll_enable;
		intel_encoder->pre_enable = vlv_pre_enable_dp;
		intel_encoder->enable = vlv_enable_dp;
		intel_encoder->disable = vlv_disable_dp;
		intel_encoder->post_disable = vlv_post_disable_dp;
	} else {
		intel_encoder->pre_enable = g4x_pre_enable_dp;
		intel_encoder->enable = g4x_enable_dp;
		intel_encoder->disable = g4x_disable_dp;
		intel_encoder->post_disable = g4x_post_disable_dp;
	}
	intel_encoder->audio_enable = g4x_dp_audio_enable;
	intel_encoder->audio_disable = g4x_dp_audio_disable;

	if ((IS_IVYBRIDGE(dev_priv) && port == PORT_A) ||
	    (HAS_PCH_CPT(dev_priv) && port != PORT_A))
		dig_port->dp.set_link_train = cpt_set_link_train;
	else
		dig_port->dp.set_link_train = g4x_set_link_train;

	if (IS_CHERRYVIEW(dev_priv))
		intel_encoder->set_signal_levels = chv_set_signal_levels;
	else if (IS_VALLEYVIEW(dev_priv))
		intel_encoder->set_signal_levels = vlv_set_signal_levels;
	else if (IS_IVYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = ivb_cpu_edp_set_signal_levels;
	else if (IS_SANDYBRIDGE(dev_priv) && port == PORT_A)
		intel_encoder->set_signal_levels = snb_cpu_edp_set_signal_levels;
	else
		intel_encoder->set_signal_levels = g4x_set_signal_levels;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv) ||
	    (HAS_PCH_SPLIT(dev_priv) && port != PORT_A)) {
		dig_port->dp.preemph_max = intel_dp_preemph_max_3;
		dig_port->dp.voltage_max = intel_dp_voltage_max_3;
	} else {
		dig_port->dp.preemph_max = intel_dp_preemph_max_2;
		dig_port->dp.voltage_max = intel_dp_voltage_max_2;
	}

	dig_port->dp.output_reg = output_reg;
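	/* all DP ports handled here are treated as 4 lane capable */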
	dig_port->max_lanes = 4;

	intel_encoder->type = INTEL_OUTPUT_DP;
	intel_encoder->power_domain = intel_display_power_ddi_lanes_domain(dev_priv, port);
	if (IS_CHERRYVIEW(dev_priv)) {
		if (port == PORT_D)
			intel_encoder->pipe_mask = BIT(PIPE_C);
		else
			intel_encoder->pipe_mask = BIT(PIPE_A) | BIT(PIPE_B);
	} else {
		intel_encoder->pipe_mask = ~0;
	}
	intel_encoder->cloneable = 0;
	intel_encoder->port = port;
	intel_encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	dig_port->hpd_pulse = intel_dp_hpd_pulse;

	if (HAS_GMCH(dev_priv)) {
		dig_port->connected = g4x_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = ilk_digital_port_connected;
		else
			dig_port->connected = ibx_digital_port_connected;
	}

	if (port != PORT_A)
		intel_infoframe_init(dig_port);

	dig_port->aux_ch = intel_dp_aux_ch(intel_encoder);
	if (dig_port->aux_ch == AUX_CH_NONE)
		goto err_init_connector;

	if (!intel_dp_init_connector(dig_port, intel_connector))
		goto err_init_connector;

	return true;

err_init_connector:
	drm_encoder_cleanup(encoder);
err_encoder_init:
	kfree(intel_connector);
err_connector_alloc:
	kfree(dig_port);
	return false;
}