/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <drm/drm_scdc_helper.h>

#include "i915_drv.h"
#include "intel_audio.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_crtc.h"
#include "intel_ddi.h"
#include "intel_ddi_buf_trans.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_link_training.h"
#include "intel_dp_mst.h"
#include "intel_dpio_phy.h"
#include "intel_dsi.h"
#include "intel_fdi.h"
#include "intel_fifo_underrun.h"
#include "intel_gmbus.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_lspcon.h"
#include "intel_panel.h"
#include "intel_pps.h"
#include "intel_psr.h"
#include "intel_sprite.h"
#include "intel_tc.h"
#include "intel_vdsc.h"
#include "intel_vrr.h"
#include "skl_scaler.h"
#include "skl_universal_plane.h"

static const u8 index_to_dp_signal_levels[] = {
	[0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3,
	[4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0,
};

static int intel_ddi_hdmi_level(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int n_entries, level, default_entry;

	n_entries = intel_ddi_hdmi_num_entries(encoder, crtc_state, &default_entry);
	if (n_entries == 0)
		return 0;
	level = intel_bios_hdmi_level_shift(encoder);
	if (level < 0)
		level = default_entry;

	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	return level;
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * DP/eDP/FDI use cases.
 */
void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int i, n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv,
							       &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
							       &n_entries);
	else
		ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
							      &n_entries);

	/* If we're boosting the current, set bit 31 of trans1 */
	if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv) &&
	    intel_bios_encoder_dp_boost_level(encoder->devdata))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	for (i = 0; i < n_entries; i++) {
		intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i),
			       ddi_translations[i].trans1 | iboost_bit);
		intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i),
			       ddi_translations[i].trans2);
	}
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * HDMI/DVI use cases.
 */
static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* If we're boosting the current, set bit 31 of trans1 */
	if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv) &&
	    intel_bios_encoder_hdmi_boost_level(encoder->devdata))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	/* Entry 9 is for HDMI: */
	intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9),
		       ddi_translations[level].trans1 | iboost_bit);
	intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9),
		       ddi_translations[level].trans2);
}

void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv,
			     enum port port)
{
	if (IS_BROXTON(dev_priv)) {
		udelay(16);
		return;
	}

	if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			 DDI_BUF_IS_IDLE), 8))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n",
			port_name(port));
}

static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv,
				      enum port port)
{
	/* Wait > 518 usecs for DDI_BUF_CTL to be non idle */
	if (DISPLAY_VER(dev_priv) < 10) {
		usleep_range(518, 1000);
		return;
	}

	if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			  DDI_BUF_IS_IDLE), 500))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n",
			port_name(port));
}

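/* Map a HSW/BDW shared DPLL to the PORT_CLK_SEL value used by the DDI. */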
static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll)
{
	switch (pll->info->id) {
	case DPLL_ID_WRPLL1:
		return PORT_CLK_SEL_WRPLL1;
	case DPLL_ID_WRPLL2:
		return PORT_CLK_SEL_WRPLL2;
	case DPLL_ID_SPLL:
		return PORT_CLK_SEL_SPLL;
	case DPLL_ID_LCPLL_810:
		return PORT_CLK_SEL_LCPLL_810;
	case DPLL_ID_LCPLL_1350:
		return PORT_CLK_SEL_LCPLL_1350;
	case DPLL_ID_LCPLL_2700:
		return PORT_CLK_SEL_LCPLL_2700;
	default:
		MISSING_CASE(pll->info->id);
		return PORT_CLK_SEL_NONE;
	}
}

static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	int clock = crtc_state->port_clock;
	const enum intel_dpll_id id = pll->info->id;

	switch (id) {
	default:
		/*
		 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used
		 * here, so do warn if this gets passed in
		 */
		MISSING_CASE(id);
		return DDI_CLK_SEL_NONE;
	case DPLL_ID_ICL_TBTPLL:
		switch (clock) {
		case 162000:
			return DDI_CLK_SEL_TBT_162;
		case 270000:
			return DDI_CLK_SEL_TBT_270;
		case 540000:
			return DDI_CLK_SEL_TBT_540;
		case 810000:
			return DDI_CLK_SEL_TBT_810;
		default:
			MISSING_CASE(clock);
			return DDI_CLK_SEL_NONE;
		}
	case DPLL_ID_ICL_MGPLL1:
	case DPLL_ID_ICL_MGPLL2:
	case DPLL_ID_ICL_MGPLL3:
	case DPLL_ID_ICL_MGPLL4:
	case DPLL_ID_TGL_MGPLL5:
	case DPLL_ID_TGL_MGPLL6:
		return DDI_CLK_SEL_MG;
	}
}

static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

	intel_dp->DP = dig_port->saved_port_bits |
		DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0);
	intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count);
}

static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv,
				 enum port port)
{
	u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK;

	switch (val) {
	case DDI_CLK_SEL_NONE:
		return 0;
	case DDI_CLK_SEL_TBT_162:
		return 162000;
	case DDI_CLK_SEL_TBT_270:
		return 270000;
	case DDI_CLK_SEL_TBT_540:
		return 540000;
	case DDI_CLK_SEL_TBT_810:
		return 810000;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static void ddi_dotclock_get(struct intel_crtc_state *pipe_config)
{
	int dotclock;

	if (pipe_config->has_pch_encoder)
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->fdi_m_n);
	else if (intel_crtc_has_dp_encoder(pipe_config))
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->dp_m_n);
	else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24)
		dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp;
	else
		dotclock = pipe_config->port_clock;

	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 &&
	    !intel_crtc_has_dp_encoder(pipe_config))
		dotclock *= 2;

	if (pipe_config->pixel_multiplier)
		dotclock /= pipe_config->pixel_multiplier;

	pipe_config->hw.adjusted_mode.crtc_clock = dotclock;
}

void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state,
			  const struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 temp;

	if (!intel_crtc_has_dp_encoder(crtc_state))
		return;

	drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder));

	temp = DP_MSA_MISC_SYNC_CLOCK;

	switch (crtc_state->pipe_bpp) {
	case 18:
		temp |= DP_MSA_MISC_6_BPC;
		break;
	case 24:
		temp |= DP_MSA_MISC_8_BPC;
		break;
	case 30:
		temp |= DP_MSA_MISC_10_BPC;
		break;
	case 36:
		temp |= DP_MSA_MISC_12_BPC;
		break;
	default:
		MISSING_CASE(crtc_state->pipe_bpp);
		break;
	}

	/* nonsense combination */
	drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range &&
		    crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB);

	if (crtc_state->limited_color_range)
		temp |= DP_MSA_MISC_COLOR_CEA_RGB;

	/*
	 * As per DP 1.2 spec section 2.3.4.3 while sending
	 * YCBCR 444 signals we should program MSA MISC1/0 fields with
	 * colorspace information.
	 */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709;

	/*
	 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
	 * of Color Encoding Format and Content Color Gamut] while sending
	 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields
	 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format.
	 */
	if (intel_dp_needs_vsc_sdp(crtc_state, conn_state))
		temp |= DP_MSA_MISC_COLOR_VSC_SDP;

	intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp);
}

static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder)
{
	if (master_transcoder == TRANSCODER_EDP)
		return 0;
	else
		return master_transcoder + 1;
}

/*
 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state.
 *
 * Only intended to be used by intel_ddi_enable_transcoder_func() and
 * intel_ddi_config_transcoder_func().
 */
static u32
intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum port port = encoder->port;
	u32 temp;

	/* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */
	temp = TRANS_DDI_FUNC_ENABLE;
	if (DISPLAY_VER(dev_priv) >= 12)
		temp |= TGL_TRANS_DDI_SELECT_PORT(port);
	else
		temp |= TRANS_DDI_SELECT_PORT(port);

	switch (crtc_state->pipe_bpp) {
	case 18:
		temp |= TRANS_DDI_BPC_6;
		break;
	case 24:
		temp |= TRANS_DDI_BPC_8;
		break;
	case 30:
		temp |= TRANS_DDI_BPC_10;
		break;
	case 36:
		temp |= TRANS_DDI_BPC_12;
		break;
	default:
		BUG();
	}

	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC)
		temp |= TRANS_DDI_PVSYNC;
	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC)
		temp |= TRANS_DDI_PHSYNC;

	if (cpu_transcoder == TRANSCODER_EDP) {
		switch (pipe) {
		case PIPE_A:
			/* On Haswell, can only use the always-on power well for
			 * eDP when not using the panel fitter, and when not
			 * using motion blur mitigation (which we don't
			 * support). */
			if (crtc_state->pch_pfit.force_thru)
				temp |= TRANS_DDI_EDP_INPUT_A_ONOFF;
			else
				temp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			temp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			temp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		default:
			BUG();
			break;
		}
	}

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		if (crtc_state->has_hdmi_sink)
			temp |= TRANS_DDI_MODE_SELECT_HDMI;
		else
			temp |= TRANS_DDI_MODE_SELECT_DVI;

		if (crtc_state->hdmi_scrambling)
			temp |= TRANS_DDI_HDMI_SCRAMBLING;
		if (crtc_state->hdmi_high_tmds_clock_ratio)
			temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) {
		temp |= TRANS_DDI_MODE_SELECT_FDI;
		temp |= (crtc_state->fdi_lanes - 1) << 1;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		temp |= TRANS_DDI_MODE_SELECT_DP_MST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);

		if (DISPLAY_VER(dev_priv) >= 12) {
			enum transcoder master;

			master = crtc_state->mst_master_transcoder;
			drm_WARN_ON(&dev_priv->drm,
				    master == INVALID_TRANSCODER);
			temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master);
		}
	} else {
		temp |= TRANS_DDI_MODE_SELECT_DP_SST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);
	}

	if (IS_DISPLAY_VER(dev_priv, 8, 10) &&
	    crtc_state->master_transcoder != INVALID_TRANSCODER) {
		u8 master_select =
			bdw_trans_port_sync_master_select(crtc_state->master_transcoder);

		temp |= TRANS_DDI_PORT_SYNC_ENABLE |
			TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select);
	}

	return temp;
}

void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (DISPLAY_VER(dev_priv) >= 11) {
		enum transcoder master_transcoder = crtc_state->master_transcoder;
		u32 ctl2 = 0;

		if (master_transcoder != INVALID_TRANSCODER) {
			u8 master_select =
				bdw_trans_port_sync_master_select(master_transcoder);

			ctl2 |= PORT_SYNC_MODE_ENABLE |
				PORT_SYNC_MODE_MASTER_SELECT(master_select);
		}

		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2);
	}

	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder),
		       intel_ddi_transcoder_func_reg_val_get(encoder,
							     crtc_state));
}

/*
 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable
 * bit.
 */
static void
intel_ddi_config_transcoder_func(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 ctl;

	ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state);
	ctl &= ~TRANS_DDI_FUNC_ENABLE;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);
}

void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 ctl;

	if (DISPLAY_VER(dev_priv) >= 11)
		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0);

	ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

	drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING);

	ctl &= ~TRANS_DDI_FUNC_ENABLE;

	if (IS_DISPLAY_VER(dev_priv, 8, 10))
		ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE |
			 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK);

	if (DISPLAY_VER(dev_priv) >= 12) {
		if (!intel_dp_mst_is_master_trans(crtc_state)) {
			ctl &= ~(TGL_TRANS_DDI_PORT_MASK |
				 TRANS_DDI_MODE_SELECT_MASK);
		}
	} else {
		ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK);
	}

	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);

	if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME &&
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Quirk Increase DDI disabled time\n");
		/* Quirk time at 100ms for reliable operation */
		msleep(100);
	}
}

int intel_ddi_toggle_hdcp_bits(struct intel_encoder *intel_encoder,
			       enum transcoder cpu_transcoder,
			       bool enable, u32 hdcp_mask)
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	intel_wakeref_t wakeref;
	int ret = 0;
	u32 tmp;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     intel_encoder->power_domain);
	if (drm_WARN_ON(dev, !wakeref))
		return -ENXIO;

	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
	if (enable)
		tmp |= hdcp_mask;
	else
		tmp &= ~hdcp_mask;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp);
	intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref);
	return ret;
}

bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector)
{
	struct drm_device *dev = intel_connector->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_encoder *encoder = intel_attached_encoder(intel_connector);
	int type = intel_connector->base.connector_type;
	enum port port = encoder->port;
	enum transcoder cpu_transcoder;
	intel_wakeref_t wakeref;
	enum pipe pipe = 0;
	u32 tmp;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	if (!encoder->get_hw_state(encoder, &pipe)) {
		ret = false;
		goto out;
	}

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
		cpu_transcoder = TRANSCODER_EDP;
	else
		cpu_transcoder = (enum transcoder) pipe;

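	/*
	 * The DDI mode in TRANS_DDI_FUNC_CTL tells us what kind of sink this
	 * transcoder is currently driving.
	 */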
	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

	switch (tmp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
	case TRANS_DDI_MODE_SELECT_DVI:
		ret = type == DRM_MODE_CONNECTOR_HDMIA;
		break;

	case TRANS_DDI_MODE_SELECT_DP_SST:
		ret = type == DRM_MODE_CONNECTOR_eDP ||
			type == DRM_MODE_CONNECTOR_DisplayPort;
		break;

	case TRANS_DDI_MODE_SELECT_DP_MST:
		/* if the transcoder is in MST state then
		 * connector isn't connected */
		ret = false;
		break;

	case TRANS_DDI_MODE_SELECT_FDI:
		ret = type == DRM_MODE_CONNECTOR_VGA;
		break;

	default:
		ret = false;
		break;
	}

out:
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder,
					u8 *pipe_mask, bool *is_dp_mst)
{
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = encoder->port;
	intel_wakeref_t wakeref;
	enum pipe p;
	u32 tmp;
	u8 mst_pipe_mask;

	*pipe_mask = 0;
	*is_dp_mst = false;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return;

	tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
	if (!(tmp & DDI_BUF_CTL_ENABLE))
		goto out;

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) {
		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(TRANSCODER_EDP));

		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		default:
			MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK);
			fallthrough;
		case TRANS_DDI_EDP_INPUT_A_ON:
		case TRANS_DDI_EDP_INPUT_A_ONOFF:
			*pipe_mask = BIT(PIPE_A);
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe_mask = BIT(PIPE_B);
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe_mask = BIT(PIPE_C);
			break;
		}

		goto out;
	}

	mst_pipe_mask = 0;
	for_each_pipe(dev_priv, p) {
		enum transcoder cpu_transcoder = (enum transcoder)p;
		unsigned int port_mask, ddi_select;
		intel_wakeref_t trans_wakeref;

		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   POWER_DOMAIN_TRANSCODER(cpu_transcoder));
		if (!trans_wakeref)
			continue;

		if (DISPLAY_VER(dev_priv) >= 12) {
			port_mask = TGL_TRANS_DDI_PORT_MASK;
			ddi_select = TGL_TRANS_DDI_SELECT_PORT(port);
		} else {
			port_mask = TRANS_DDI_PORT_MASK;
			ddi_select = TRANS_DDI_SELECT_PORT(port);
		}

		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(cpu_transcoder));
		intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder),
					trans_wakeref);

		if ((tmp & port_mask) != ddi_select)
			continue;

		if ((tmp & TRANS_DDI_MODE_SELECT_MASK) ==
		    TRANS_DDI_MODE_SELECT_DP_MST)
			mst_pipe_mask |= BIT(p);

		*pipe_mask |= BIT(p);
	}

	if (!*pipe_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "No pipe for [ENCODER:%d:%s] found\n",
			    encoder->base.base.id, encoder->base.name);

	if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) {
		drm_dbg_kms(&dev_priv->drm,
			    "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask);
		*pipe_mask = BIT(ffs(*pipe_mask) - 1);
	}

	if (mst_pipe_mask && mst_pipe_mask != *pipe_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n",
%02x)\n", 760 encoder->base.base.id, encoder->base.name, 761 *pipe_mask, mst_pipe_mask); 762 else 763 *is_dp_mst = mst_pipe_mask; 764 765 out: 766 if (*pipe_mask && (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))) { 767 tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port)); 768 if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK | 769 BXT_PHY_LANE_POWERDOWN_ACK | 770 BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED) 771 drm_err(&dev_priv->drm, 772 "[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n", 773 encoder->base.base.id, encoder->base.name, tmp); 774 } 775 776 intel_display_power_put(dev_priv, encoder->power_domain, wakeref); 777 } 778 779 bool intel_ddi_get_hw_state(struct intel_encoder *encoder, 780 enum pipe *pipe) 781 { 782 u8 pipe_mask; 783 bool is_mst; 784 785 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 786 787 if (is_mst || !pipe_mask) 788 return false; 789 790 *pipe = ffs(pipe_mask) - 1; 791 792 return true; 793 } 794 795 static enum intel_display_power_domain 796 intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port) 797 { 798 /* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with 799 * DC states enabled at the same time, while for driver initiated AUX 800 * transfers we need the same AUX IOs to be powered but with DC states 801 * disabled. Accordingly use the AUX power domain here which leaves DC 802 * states enabled. 803 * However, for non-A AUX ports the corresponding non-EDP transcoders 804 * would have already enabled power well 2 and DC_OFF. This means we can 805 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a 806 * specific AUX_IO reference without powering up any extra wells. 807 * Note that PSR is enabled only on Port A even though this function 808 * returns the correct domain for other ports too. 809 */ 810 return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A : 811 intel_aux_power_domain(dig_port); 812 } 813 814 static void intel_ddi_get_power_domains(struct intel_encoder *encoder, 815 struct intel_crtc_state *crtc_state) 816 { 817 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 818 struct intel_digital_port *dig_port; 819 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 820 821 /* 822 * TODO: Add support for MST encoders. Atm, the following should never 823 * happen since fake-MST encoders don't set their get_power_domains() 824 * hook. 825 */ 826 if (drm_WARN_ON(&dev_priv->drm, 827 intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))) 828 return; 829 830 dig_port = enc_to_dig_port(encoder); 831 832 if (!intel_phy_is_tc(dev_priv, phy) || 833 dig_port->tc_mode != TC_PORT_TBT_ALT) { 834 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 835 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 836 dig_port->ddi_io_power_domain); 837 } 838 839 /* 840 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC 841 * ports. 
	 */
	if (intel_crtc_has_dp_encoder(crtc_state) ||
	    intel_phy_is_tc(dev_priv, phy)) {
		drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref);
		dig_port->aux_wakeref =
			intel_display_power_get(dev_priv,
						intel_ddi_main_link_aux_domain(dig_port));
	}
}

void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	if (cpu_transcoder != TRANSCODER_EDP) {
		if (DISPLAY_VER(dev_priv) >= 13)
			val = TGL_TRANS_CLK_SEL_PORT(phy);
		else if (DISPLAY_VER(dev_priv) >= 12)
			val = TGL_TRANS_CLK_SEL_PORT(encoder->port);
		else
			val = TRANS_CLK_SEL_PORT(encoder->port);

		intel_de_write(dev_priv, TRANS_CLK_SEL(cpu_transcoder), val);
	}
}

void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (cpu_transcoder != TRANSCODER_EDP) {
		if (DISPLAY_VER(dev_priv) >= 12)
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_DISABLED);
		else
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_DISABLED);
	}
}

static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv,
				enum port port, u8 iboost)
{
	u32 tmp;

	tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0);
	tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port));
	if (iboost)
		tmp |= iboost << BALANCE_LEG_SHIFT(port);
	else
		tmp |= BALANCE_LEG_DISABLE(port);
	intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp);
}

static void skl_ddi_set_iboost(struct intel_encoder *encoder,
			       const struct intel_crtc_state *crtc_state,
			       int level)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u8 iboost;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		iboost = intel_bios_encoder_hdmi_boost_level(encoder->devdata);
	else
		iboost = intel_bios_encoder_dp_boost_level(encoder->devdata);

	if (iboost == 0) {
		const struct ddi_buf_trans *ddi_translations;
		int n_entries;

		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
			ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries);
		else
			ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries);

		if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
			return;
		if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
			level = n_entries - 1;

		iboost = ddi_translations[level].i_boost;
	}

	/* Make sure that the requested I_boost is valid */
	if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) {
		drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost);
		return;
	}

	_skl_ddi_set_iboost(dev_priv, encoder->port, iboost);

	if (encoder->port == PORT_A && dig_port->max_lanes == 4)
		_skl_ddi_set_iboost(dev_priv, PORT_E, iboost);
}

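/* Program the BXT/GLK PHY vswing, scale and de-emphasis for the given level. */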
static void bxt_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct bxt_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries;

	ddi_translations = bxt_get_buf_trans(encoder, crtc_state, &n_entries);
	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	bxt_ddi_phy_set_signal_level(dev_priv, port,
				     ddi_translations[level].margin,
				     ddi_translations[level].scale,
				     ddi_translations[level].enable,
				     ddi_translations[level].deemphasis);
}

static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp,
				   const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int n_entries;

	if (DISPLAY_VER(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
			tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else
			tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries);
	} else if (DISPLAY_VER(dev_priv) == 11) {
		if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE))
			jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE))
			ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else
			icl_get_mg_buf_trans(encoder, crtc_state, &n_entries);
	} else if (IS_CANNONLAKE(dev_priv)) {
		cnl_get_buf_trans(encoder, crtc_state, &n_entries);
	} else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) {
		bxt_get_buf_trans(encoder, crtc_state, &n_entries);
	} else {
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			intel_ddi_get_buf_trans_edp(encoder, &n_entries);
		else
			intel_ddi_get_buf_trans_dp(encoder, &n_entries);
	}

	if (drm_WARN_ON(&dev_priv->drm, n_entries < 1))
		n_entries = 1;
	if (drm_WARN_ON(&dev_priv->drm,
			n_entries > ARRAY_SIZE(index_to_dp_signal_levels)))
		n_entries = ARRAY_SIZE(index_to_dp_signal_levels);

	return index_to_dp_signal_levels[n_entries - 1] &
		DP_TRAIN_VOLTAGE_SWING_MASK;
}

/*
 * We assume that the full set of pre-emphasis values can be
 * used on all DDI platforms. Should that change we need to
 * rethink this code.
 */
static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

static void cnl_ddi_vswing_program(struct intel_encoder *encoder,
				   const struct intel_crtc_state *crtc_state,
				   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct cnl_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries, ln;
	u32 val;

	ddi_translations = cnl_get_buf_trans(encoder, crtc_state, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~SCALING_MODE_SEL_MASK;
	val |= SCALING_MODE_SEL(2);
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW2 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port));
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Rcomp scalar is fixed as 0x98 for every table entry */
	val |= RCOMP_SCALAR(0x98);
	intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val);

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen */
	for (ln = 0; ln < 4; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* Program PORT_TX_DW5 */
	/* All DW5 values are fixed for every table entry */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~RTERM_SELECT_MASK;
	val |= RTERM_SELECT(6);
	val |= TAP3_DISABLE;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW7 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port));
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
	intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val);
}

static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int width, rate, ln;
	u32 val;

	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
	val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port));
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		val &= ~COMMON_KEEPER_EN;
	else
		val |= COMMON_KEEPER_EN;
	intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val);

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1) ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
	val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5);
	val |= SUS_CLOCK_CONFIG;
	intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val);

	/* 4. Clear training enable to change swing values */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~TX_TRAINING_EN;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* 5. Program swing and de-emphasis */
	cnl_ddi_vswing_program(encoder, crtc_state, level);

	/* 6. Set training enable to trigger update */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val |= TX_TRAINING_EN;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);
}

static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder,
					 const struct intel_crtc_state *crtc_state,
					 int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct cnl_ddi_buf_trans *ddi_translations;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	int n_entries, ln;
	u32 val;

	if (DISPLAY_VER(dev_priv) >= 12)
		ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
	else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE))
		ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
	else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE))
		ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
	else
		ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED;
		intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations);
		intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val,
			     intel_dp->hobl_active ? val : 0);
	}

	/* Set PORT_TX_DW5 */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK |
		 TAP2_DISABLE | TAP3_DISABLE);
	val |= SCALING_MODE_SEL(0x2);
	val |= RTERM_SELECT(0x6);
	val |= TAP3_DISABLE;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);

	/* Program PORT_TX_DW2 */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy));
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Program Rcomp scalar for every table entry */
	val |= RCOMP_SCALAR(0x98);
	intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val);

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen. */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
	}

	/* Program PORT_TX_DW7 */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy));
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
	intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val);
}

static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					      const struct intel_crtc_state *crtc_state,
					      int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	int width, rate, ln;
	u32 val;

	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
	val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy));
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		val &= ~COMMON_KEEPER_EN;
	else
		val |= COMMON_KEEPER_EN;
	intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val);

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1) ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
	val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
	val |= SUS_CLOCK_CONFIG;
	intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val);

	/* 4. Clear training enable to change swing values */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val &= ~TX_TRAINING_EN;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);

	/* 5. Program swing and de-emphasis */
	icl_ddi_combo_vswing_program(encoder, crtc_state, level);

	/* 6. Set training enable to trigger update */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val |= TX_TRAINING_EN;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
}

static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					   const struct intel_crtc_state *crtc_state,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct icl_mg_phy_ddi_buf_trans *ddi_translations;
	int n_entries, ln;
	u32 val;

	if (enc_to_dig_port(encoder)->tc_mode == TC_PORT_TBT_ALT)
		return;

	ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val);
	}

	/* Program MG_TX_SWINGCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val);
	}

	/* Program MG_TX_DRVCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val);

		/* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */
	}

	/*
	 * Program MG_CLKHUB<LN, port being used> with value from frequency table
	 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the
	 * values from table for which TX1 and TX2 enabled.
	 */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port));
		if (crtc_state->port_clock < 300000)
			val |= CFG_LOW_RATE_LKREN_EN;
		else
			val &= ~CFG_LOW_RATE_LKREN_EN;
		intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val);
	}

	/* Program the MG_TX_DCC<LN, port being used> based on the link frequency */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (crtc_state->port_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (crtc_state->port_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val);
	}

	/* Program MG_TX_PISO_READLOAD with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv,
				    MG_TX1_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port),
			       val);

		val = intel_de_read(dev_priv,
				    MG_TX2_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port),
			       val);
	}
}

static void icl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations;
	u32 val, dpcnt_mask, dpcnt_val;
	int n_entries, ln;

	if (enc_to_dig_port(encoder)->tc_mode == TC_PORT_TBT_ALT)
		return;

	ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK |
		      DKL_TX_DE_EMPAHSIS_COEFF_MASK |
		      DKL_TX_VSWING_CONTROL_MASK);
	dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control);
	dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control);
	dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control);

	for (ln = 0; ln < 2; ln++) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, ln));

		intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0);

		/* All the registers are RMW */
		val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port));
		val &= ~DKL_TX_DP20BITMODE;
		intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val);

		if ((intel_crtc_has_dp_encoder(crtc_state) &&
		     crtc_state->port_clock == 162000) ||
		    (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) &&
		     crtc_state->port_clock == 594000))
			val |= DKL_TX_LOADGEN_SHARING_PMD_DISABLE;
		else
			val &= ~DKL_TX_LOADGEN_SHARING_PMD_DISABLE;
	}
}

static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level);
}

static int translate_signal_level(struct intel_dp *intel_dp,
				  u8 signal_levels)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int i;

	for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) {
		if (index_to_dp_signal_levels[i] == signal_levels)
			return i;
	}

	drm_WARN(&i915->drm, 1,
		 "Unsupported voltage swing/pre-emphasis level: 0x%x\n",
		 signal_levels);

	return 0;
}

static int intel_ddi_dp_level(struct intel_dp *intel_dp)
{
	u8 train_set = intel_dp->train_set[0];
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	return translate_signal_level(intel_dp, signal_levels);
}

static void
tgl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	tgl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
icl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	icl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
cnl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	cnl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
bxt_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	bxt_ddi_vswing_sequence(encoder, crtc_state, level);
}

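/* HSW/BDW/SKL: the vswing/pre-emphasis level is selected via DDI_BUF_CTL. */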
static void
hsw_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_dp_level(intel_dp);
	enum port port = encoder->port;
	u32 signal_levels;

	signal_levels = DDI_BUF_TRANS_SELECT(level);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~DDI_BUF_EMP_MASK;
	intel_dp->DP |= signal_levels;

	if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv))
		skl_ddi_set_iboost(encoder, crtc_state, level);

	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));
}

static void _cnl_ddi_enable_clock(struct drm_i915_private *i915, i915_reg_t reg,
				  u32 clk_sel_mask, u32 clk_sel, u32 clk_off)
{
	mutex_lock(&i915->dpll.lock);

	intel_de_rmw(i915, reg, clk_sel_mask, clk_sel);

	/*
	 * "This step and the step before must be
	 * done with separate register writes."
	 */
	intel_de_rmw(i915, reg, clk_off, 0);

	mutex_unlock(&i915->dpll.lock);
}

static void _cnl_ddi_disable_clock(struct drm_i915_private *i915, i915_reg_t reg,
				   u32 clk_off)
{
	mutex_lock(&i915->dpll.lock);

	intel_de_rmw(i915, reg, 0, clk_off);

	mutex_unlock(&i915->dpll.lock);
}

static bool _cnl_ddi_is_clock_enabled(struct drm_i915_private *i915, i915_reg_t reg,
				      u32 clk_off)
{
	return !(intel_de_read(i915, reg) & clk_off);
}

static struct intel_shared_dpll *
_cnl_ddi_get_pll(struct drm_i915_private *i915, i915_reg_t reg,
		 u32 clk_sel_mask, u32 clk_sel_shift)
{
	enum intel_dpll_id id;

	id = (intel_de_read(i915, reg) & clk_sel_mask) >> clk_sel_shift;

	return intel_get_shared_dpll_by_id(i915, id);
}

static void adls_ddi_enable_clock(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	if (drm_WARN_ON(&i915->drm, !pll))
		return;

	_cnl_ddi_enable_clock(i915, ADLS_DPCLKA_CFGCR(phy),
			      ADLS_DPCLKA_CFGCR_DDI_CLK_SEL_MASK(phy),
			      pll->info->id << ADLS_DPCLKA_CFGCR_DDI_SHIFT(phy),
			      ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static void adls_ddi_disable_clock(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	_cnl_ddi_disable_clock(i915, ADLS_DPCLKA_CFGCR(phy),
			       ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static bool adls_ddi_is_clock_enabled(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	return _cnl_ddi_is_clock_enabled(i915, ADLS_DPCLKA_CFGCR(phy),
					 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static struct intel_shared_dpll *adls_ddi_get_pll(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	return _cnl_ddi_get_pll(i915, ADLS_DPCLKA_CFGCR(phy),
				ADLS_DPCLKA_CFGCR_DDI_CLK_SEL_MASK(phy),
				ADLS_DPCLKA_CFGCR_DDI_SHIFT(phy));
}

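/*
 * RKL routes its combo PHY port clocks through ICL_DPCLKA_CFGCR0, but with
 * RKL-specific clock select bitfields.
 */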
static void rkl_ddi_enable_clock(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	if (drm_WARN_ON(&i915->drm, !pll))
		return;

	_cnl_ddi_enable_clock(i915, ICL_DPCLKA_CFGCR0,
			      RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy),
			      RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy),
			      RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static void rkl_ddi_disable_clock(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	_cnl_ddi_disable_clock(i915, ICL_DPCLKA_CFGCR0,
			       RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static bool rkl_ddi_is_clock_enabled(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	return _cnl_ddi_is_clock_enabled(i915, ICL_DPCLKA_CFGCR0,
					 RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static struct intel_shared_dpll *rkl_ddi_get_pll(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	return _cnl_ddi_get_pll(i915, ICL_DPCLKA_CFGCR0,
				RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy),
				RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy));
}

static void dg1_ddi_enable_clock(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	if (drm_WARN_ON(&i915->drm, !pll))
		return;

	/*
	 * If we fail this, something went very wrong: first 2 PLLs should be
	 * used by first 2 phys and last 2 PLLs by last phys
	 */
	if (drm_WARN_ON(&i915->drm,
			(pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) ||
			(pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C)))
		return;

	_cnl_ddi_enable_clock(i915, DG1_DPCLKA_CFGCR0(phy),
			      DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy),
			      DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy),
			      DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static void dg1_ddi_disable_clock(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	_cnl_ddi_disable_clock(i915, DG1_DPCLKA_CFGCR0(phy),
			       DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static bool dg1_ddi_is_clock_enabled(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	return _cnl_ddi_is_clock_enabled(i915, DG1_DPCLKA_CFGCR0(phy),
					 DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));
}

static struct intel_shared_dpll *dg1_ddi_get_pll(struct intel_encoder *encoder)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(i915, encoder->port);

	return _cnl_ddi_get_pll(i915, DG1_DPCLKA_CFGCR0(phy),
				DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy),
				DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy));
}

static void icl_ddi_combo_enable_clock(struct intel_encoder *encoder,
				       const struct intel_crtc_state *crtc_state)
{
drm_i915_private *i915 = to_i915(encoder->base.dev); 1766 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1767 enum phy phy = intel_port_to_phy(i915, encoder->port); 1768 1769 if (drm_WARN_ON(&i915->drm, !pll)) 1770 return; 1771 1772 _cnl_ddi_enable_clock(i915, ICL_DPCLKA_CFGCR0, 1773 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1774 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy), 1775 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1776 } 1777 1778 static void icl_ddi_combo_disable_clock(struct intel_encoder *encoder) 1779 { 1780 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1781 enum phy phy = intel_port_to_phy(i915, encoder->port); 1782 1783 _cnl_ddi_disable_clock(i915, ICL_DPCLKA_CFGCR0, 1784 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1785 } 1786 1787 static bool icl_ddi_combo_is_clock_enabled(struct intel_encoder *encoder) 1788 { 1789 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1790 enum phy phy = intel_port_to_phy(i915, encoder->port); 1791 1792 return _cnl_ddi_is_clock_enabled(i915, ICL_DPCLKA_CFGCR0, 1793 ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)); 1794 } 1795 1796 struct intel_shared_dpll *icl_ddi_combo_get_pll(struct intel_encoder *encoder) 1797 { 1798 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1799 enum phy phy = intel_port_to_phy(i915, encoder->port); 1800 1801 return _cnl_ddi_get_pll(i915, ICL_DPCLKA_CFGCR0, 1802 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy), 1803 ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(phy)); 1804 } 1805 1806 static void jsl_ddi_tc_enable_clock(struct intel_encoder *encoder, 1807 const struct intel_crtc_state *crtc_state) 1808 { 1809 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1810 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1811 enum port port = encoder->port; 1812 1813 if (drm_WARN_ON(&i915->drm, !pll)) 1814 return; 1815 1816 /* 1817 * "For DDIC and DDID, program DDI_CLK_SEL to map the MG clock to the port. 1818 * MG does not exist, but the programming is required to ungate DDIC and DDID." 
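 * The actual clock selection is still done through the combo PHY path,
 * which is why this falls through to icl_ddi_combo_enable_clock() right
 * after the DDI_CLK_SEL write below.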
1819 */ 1820 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_MG); 1821 1822 icl_ddi_combo_enable_clock(encoder, crtc_state); 1823 } 1824 1825 static void jsl_ddi_tc_disable_clock(struct intel_encoder *encoder) 1826 { 1827 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1828 enum port port = encoder->port; 1829 1830 icl_ddi_combo_disable_clock(encoder); 1831 1832 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_NONE); 1833 } 1834 1835 static bool jsl_ddi_tc_is_clock_enabled(struct intel_encoder *encoder) 1836 { 1837 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1838 enum port port = encoder->port; 1839 u32 tmp; 1840 1841 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1842 1843 if ((tmp & DDI_CLK_SEL_MASK) == DDI_CLK_SEL_NONE) 1844 return false; 1845 1846 return icl_ddi_combo_is_clock_enabled(encoder); 1847 } 1848 1849 static void icl_ddi_tc_enable_clock(struct intel_encoder *encoder, 1850 const struct intel_crtc_state *crtc_state) 1851 { 1852 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1853 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1854 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1855 enum port port = encoder->port; 1856 1857 if (drm_WARN_ON(&i915->drm, !pll)) 1858 return; 1859 1860 intel_de_write(i915, DDI_CLK_SEL(port), 1861 icl_pll_to_ddi_clk_sel(encoder, crtc_state)); 1862 1863 mutex_lock(&i915->dpll.lock); 1864 1865 intel_de_rmw(i915, ICL_DPCLKA_CFGCR0, 1866 ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port), 0); 1867 1868 mutex_unlock(&i915->dpll.lock); 1869 } 1870 1871 static void icl_ddi_tc_disable_clock(struct intel_encoder *encoder) 1872 { 1873 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1874 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1875 enum port port = encoder->port; 1876 1877 mutex_lock(&i915->dpll.lock); 1878 1879 intel_de_rmw(i915, ICL_DPCLKA_CFGCR0, 1880 0, ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port)); 1881 1882 mutex_unlock(&i915->dpll.lock); 1883 1884 intel_de_write(i915, DDI_CLK_SEL(port), DDI_CLK_SEL_NONE); 1885 } 1886 1887 static bool icl_ddi_tc_is_clock_enabled(struct intel_encoder *encoder) 1888 { 1889 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1890 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1891 enum port port = encoder->port; 1892 u32 tmp; 1893 1894 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1895 1896 if ((tmp & DDI_CLK_SEL_MASK) == DDI_CLK_SEL_NONE) 1897 return false; 1898 1899 tmp = intel_de_read(i915, ICL_DPCLKA_CFGCR0); 1900 1901 return !(tmp & ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port)); 1902 } 1903 1904 static struct intel_shared_dpll *icl_ddi_tc_get_pll(struct intel_encoder *encoder) 1905 { 1906 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1907 enum tc_port tc_port = intel_port_to_tc(i915, encoder->port); 1908 enum port port = encoder->port; 1909 enum intel_dpll_id id; 1910 u32 tmp; 1911 1912 tmp = intel_de_read(i915, DDI_CLK_SEL(port)); 1913 1914 switch (tmp & DDI_CLK_SEL_MASK) { 1915 case DDI_CLK_SEL_TBT_162: 1916 case DDI_CLK_SEL_TBT_270: 1917 case DDI_CLK_SEL_TBT_540: 1918 case DDI_CLK_SEL_TBT_810: 1919 id = DPLL_ID_ICL_TBTPLL; 1920 break; 1921 case DDI_CLK_SEL_MG: 1922 id = icl_tc_port_to_pll_id(tc_port); 1923 break; 1924 default: 1925 MISSING_CASE(tmp); 1926 fallthrough; 1927 case DDI_CLK_SEL_NONE: 1928 return NULL; 1929 } 1930 1931 return intel_get_shared_dpll_by_id(i915, id); 1932 } 1933 1934 static void cnl_ddi_enable_clock(struct intel_encoder *encoder, 1935 const struct intel_crtc_state 
*crtc_state) 1936 { 1937 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1938 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 1939 enum port port = encoder->port; 1940 1941 if (drm_WARN_ON(&i915->drm, !pll)) 1942 return; 1943 1944 _cnl_ddi_enable_clock(i915, DPCLKA_CFGCR0, 1945 DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port), 1946 DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port), 1947 DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 1948 } 1949 1950 static void cnl_ddi_disable_clock(struct intel_encoder *encoder) 1951 { 1952 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1953 enum port port = encoder->port; 1954 1955 _cnl_ddi_disable_clock(i915, DPCLKA_CFGCR0, 1956 DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 1957 } 1958 1959 static bool cnl_ddi_is_clock_enabled(struct intel_encoder *encoder) 1960 { 1961 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1962 enum port port = encoder->port; 1963 1964 return _cnl_ddi_is_clock_enabled(i915, DPCLKA_CFGCR0, 1965 DPCLKA_CFGCR0_DDI_CLK_OFF(port)); 1966 } 1967 1968 static struct intel_shared_dpll *cnl_ddi_get_pll(struct intel_encoder *encoder) 1969 { 1970 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1971 enum port port = encoder->port; 1972 1973 return _cnl_ddi_get_pll(i915, DPCLKA_CFGCR0, 1974 DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port), 1975 DPCLKA_CFGCR0_DDI_CLK_SEL_SHIFT(port)); 1976 } 1977 1978 static struct intel_shared_dpll *bxt_ddi_get_pll(struct intel_encoder *encoder) 1979 { 1980 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 1981 enum intel_dpll_id id; 1982 1983 switch (encoder->port) { 1984 case PORT_A: 1985 id = DPLL_ID_SKL_DPLL0; 1986 break; 1987 case PORT_B: 1988 id = DPLL_ID_SKL_DPLL1; 1989 break; 1990 case PORT_C: 1991 id = DPLL_ID_SKL_DPLL2; 1992 break; 1993 default: 1994 MISSING_CASE(encoder->port); 1995 return NULL; 1996 } 1997 1998 return intel_get_shared_dpll_by_id(i915, id); 1999 } 2000 2001 static void skl_ddi_enable_clock(struct intel_encoder *encoder, 2002 const struct intel_crtc_state *crtc_state) 2003 { 2004 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2005 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 2006 enum port port = encoder->port; 2007 2008 if (drm_WARN_ON(&i915->drm, !pll)) 2009 return; 2010 2011 mutex_lock(&i915->dpll.lock); 2012 2013 intel_de_rmw(i915, DPLL_CTRL2, 2014 DPLL_CTRL2_DDI_CLK_OFF(port) | 2015 DPLL_CTRL2_DDI_CLK_SEL_MASK(port), 2016 DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) | 2017 DPLL_CTRL2_DDI_SEL_OVERRIDE(port)); 2018 2019 mutex_unlock(&i915->dpll.lock); 2020 } 2021 2022 static void skl_ddi_disable_clock(struct intel_encoder *encoder) 2023 { 2024 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2025 enum port port = encoder->port; 2026 2027 mutex_lock(&i915->dpll.lock); 2028 2029 intel_de_rmw(i915, DPLL_CTRL2, 2030 0, DPLL_CTRL2_DDI_CLK_OFF(port)); 2031 2032 mutex_unlock(&i915->dpll.lock); 2033 } 2034 2035 static bool skl_ddi_is_clock_enabled(struct intel_encoder *encoder) 2036 { 2037 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2038 enum port port = encoder->port; 2039 2040 /* 2041 * FIXME Not sure if the override affects both 2042 * the PLL selection and the CLK_OFF bit. 
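 * For now only the CLK_OFF bit is checked here, while skl_ddi_get_pll()
 * below additionally requires the SEL_OVERRIDE bit before trusting the
 * PLL selection field.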
2043 */ 2044 return !(intel_de_read(i915, DPLL_CTRL2) & DPLL_CTRL2_DDI_CLK_OFF(port)); 2045 } 2046 2047 static struct intel_shared_dpll *skl_ddi_get_pll(struct intel_encoder *encoder) 2048 { 2049 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2050 enum port port = encoder->port; 2051 enum intel_dpll_id id; 2052 u32 tmp; 2053 2054 tmp = intel_de_read(i915, DPLL_CTRL2); 2055 2056 /* 2057 * FIXME Not sure if the override affects both 2058 * the PLL selection and the CLK_OFF bit. 2059 */ 2060 if ((tmp & DPLL_CTRL2_DDI_SEL_OVERRIDE(port)) == 0) 2061 return NULL; 2062 2063 id = (tmp & DPLL_CTRL2_DDI_CLK_SEL_MASK(port)) >> 2064 DPLL_CTRL2_DDI_CLK_SEL_SHIFT(port); 2065 2066 return intel_get_shared_dpll_by_id(i915, id); 2067 } 2068 2069 void hsw_ddi_enable_clock(struct intel_encoder *encoder, 2070 const struct intel_crtc_state *crtc_state) 2071 { 2072 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2073 const struct intel_shared_dpll *pll = crtc_state->shared_dpll; 2074 enum port port = encoder->port; 2075 2076 if (drm_WARN_ON(&i915->drm, !pll)) 2077 return; 2078 2079 intel_de_write(i915, PORT_CLK_SEL(port), hsw_pll_to_ddi_pll_sel(pll)); 2080 } 2081 2082 void hsw_ddi_disable_clock(struct intel_encoder *encoder) 2083 { 2084 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2085 enum port port = encoder->port; 2086 2087 intel_de_write(i915, PORT_CLK_SEL(port), PORT_CLK_SEL_NONE); 2088 } 2089 2090 bool hsw_ddi_is_clock_enabled(struct intel_encoder *encoder) 2091 { 2092 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2093 enum port port = encoder->port; 2094 2095 return intel_de_read(i915, PORT_CLK_SEL(port)) != PORT_CLK_SEL_NONE; 2096 } 2097 2098 static struct intel_shared_dpll *hsw_ddi_get_pll(struct intel_encoder *encoder) 2099 { 2100 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2101 enum port port = encoder->port; 2102 enum intel_dpll_id id; 2103 u32 tmp; 2104 2105 tmp = intel_de_read(i915, PORT_CLK_SEL(port)); 2106 2107 switch (tmp & PORT_CLK_SEL_MASK) { 2108 case PORT_CLK_SEL_WRPLL1: 2109 id = DPLL_ID_WRPLL1; 2110 break; 2111 case PORT_CLK_SEL_WRPLL2: 2112 id = DPLL_ID_WRPLL2; 2113 break; 2114 case PORT_CLK_SEL_SPLL: 2115 id = DPLL_ID_SPLL; 2116 break; 2117 case PORT_CLK_SEL_LCPLL_810: 2118 id = DPLL_ID_LCPLL_810; 2119 break; 2120 case PORT_CLK_SEL_LCPLL_1350: 2121 id = DPLL_ID_LCPLL_1350; 2122 break; 2123 case PORT_CLK_SEL_LCPLL_2700: 2124 id = DPLL_ID_LCPLL_2700; 2125 break; 2126 default: 2127 MISSING_CASE(tmp); 2128 fallthrough; 2129 case PORT_CLK_SEL_NONE: 2130 return NULL; 2131 } 2132 2133 return intel_get_shared_dpll_by_id(i915, id); 2134 } 2135 2136 void intel_ddi_enable_clock(struct intel_encoder *encoder, 2137 const struct intel_crtc_state *crtc_state) 2138 { 2139 if (encoder->enable_clock) 2140 encoder->enable_clock(encoder, crtc_state); 2141 } 2142 2143 static void intel_ddi_disable_clock(struct intel_encoder *encoder) 2144 { 2145 if (encoder->disable_clock) 2146 encoder->disable_clock(encoder); 2147 } 2148 2149 void intel_ddi_sanitize_encoder_pll_mapping(struct intel_encoder *encoder) 2150 { 2151 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2152 u32 port_mask; 2153 bool ddi_clk_needed; 2154 2155 /* 2156 * In case of DP MST, we sanitize the primary encoder only, not the 2157 * virtual ones. 
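 * The virtual MST encoders share the primary encoder's port clock, so
 * sanitizing the primary encoder covers them as well.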
2158 */ 2159 if (encoder->type == INTEL_OUTPUT_DP_MST) 2160 return; 2161 2162 if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) { 2163 u8 pipe_mask; 2164 bool is_mst; 2165 2166 intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst); 2167 /* 2168 * In the unlikely case that BIOS enables DP in MST mode, just 2169 * warn since our MST HW readout is incomplete. 2170 */ 2171 if (drm_WARN_ON(&i915->drm, is_mst)) 2172 return; 2173 } 2174 2175 port_mask = BIT(encoder->port); 2176 ddi_clk_needed = encoder->base.crtc; 2177 2178 if (encoder->type == INTEL_OUTPUT_DSI) { 2179 struct intel_encoder *other_encoder; 2180 2181 port_mask = intel_dsi_encoder_ports(encoder); 2182 /* 2183 * Sanity check that we haven't incorrectly registered another 2184 * encoder using any of the ports of this DSI encoder. 2185 */ 2186 for_each_intel_encoder(&i915->drm, other_encoder) { 2187 if (other_encoder == encoder) 2188 continue; 2189 2190 if (drm_WARN_ON(&i915->drm, 2191 port_mask & BIT(other_encoder->port))) 2192 return; 2193 } 2194 /* 2195 * For DSI we keep the ddi clocks gated 2196 * except during enable/disable sequence. 2197 */ 2198 ddi_clk_needed = false; 2199 } 2200 2201 if (ddi_clk_needed || !encoder->disable_clock || 2202 !encoder->is_clock_enabled(encoder)) 2203 return; 2204 2205 drm_notice(&i915->drm, 2206 "[ENCODER:%d:%s] is disabled/in DSI mode with an ungated DDI clock, gate it\n", 2207 encoder->base.base.id, encoder->base.name); 2208 2209 encoder->disable_clock(encoder); 2210 } 2211 2212 static void 2213 icl_program_mg_dp_mode(struct intel_digital_port *dig_port, 2214 const struct intel_crtc_state *crtc_state) 2215 { 2216 struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev); 2217 enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port); 2218 enum phy phy = intel_port_to_phy(dev_priv, dig_port->base.port); 2219 u32 ln0, ln1, pin_assignment; 2220 u8 width; 2221 2222 if (!intel_phy_is_tc(dev_priv, phy) || 2223 dig_port->tc_mode == TC_PORT_TBT_ALT) 2224 return; 2225 2226 if (DISPLAY_VER(dev_priv) >= 12) { 2227 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2228 HIP_INDEX_VAL(tc_port, 0x0)); 2229 ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 2230 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2231 HIP_INDEX_VAL(tc_port, 0x1)); 2232 ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port)); 2233 } else { 2234 ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port)); 2235 ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port)); 2236 } 2237 2238 ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 2239 ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE); 2240 2241 /* DPPATC */ 2242 pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port); 2243 width = crtc_state->lane_count; 2244 2245 switch (pin_assignment) { 2246 case 0x0: 2247 drm_WARN_ON(&dev_priv->drm, 2248 dig_port->tc_mode != TC_PORT_LEGACY); 2249 if (width == 1) { 2250 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2251 } else { 2252 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2253 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2254 } 2255 break; 2256 case 0x1: 2257 if (width == 4) { 2258 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2259 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2260 } 2261 break; 2262 case 0x2: 2263 if (width == 2) { 2264 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2265 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2266 } 2267 break; 2268 case 0x3: 2269 case 0x5: 2270 if (width == 1) { 2271 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 2272 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2273 } else { 2274 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2275 ln1 |= 
MG_DP_MODE_CFG_DP_X2_MODE; 2276 } 2277 break; 2278 case 0x4: 2279 case 0x6: 2280 if (width == 1) { 2281 ln0 |= MG_DP_MODE_CFG_DP_X1_MODE; 2282 ln1 |= MG_DP_MODE_CFG_DP_X1_MODE; 2283 } else { 2284 ln0 |= MG_DP_MODE_CFG_DP_X2_MODE; 2285 ln1 |= MG_DP_MODE_CFG_DP_X2_MODE; 2286 } 2287 break; 2288 default: 2289 MISSING_CASE(pin_assignment); 2290 } 2291 2292 if (DISPLAY_VER(dev_priv) >= 12) { 2293 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2294 HIP_INDEX_VAL(tc_port, 0x0)); 2295 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0); 2296 intel_de_write(dev_priv, HIP_INDEX_REG(tc_port), 2297 HIP_INDEX_VAL(tc_port, 0x1)); 2298 intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1); 2299 } else { 2300 intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0); 2301 intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1); 2302 } 2303 } 2304 2305 static enum transcoder 2306 tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state) 2307 { 2308 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) 2309 return crtc_state->mst_master_transcoder; 2310 else 2311 return crtc_state->cpu_transcoder; 2312 } 2313 2314 i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder, 2315 const struct intel_crtc_state *crtc_state) 2316 { 2317 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2318 2319 if (DISPLAY_VER(dev_priv) >= 12) 2320 return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state)); 2321 else 2322 return DP_TP_CTL(encoder->port); 2323 } 2324 2325 i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder, 2326 const struct intel_crtc_state *crtc_state) 2327 { 2328 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2329 2330 if (DISPLAY_VER(dev_priv) >= 12) 2331 return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state)); 2332 else 2333 return DP_TP_STATUS(encoder->port); 2334 } 2335 2336 static void intel_dp_sink_set_msa_timing_par_ignore_state(struct intel_dp *intel_dp, 2337 const struct intel_crtc_state *crtc_state, 2338 bool enable) 2339 { 2340 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2341 2342 if (!crtc_state->vrr.enable) 2343 return; 2344 2345 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_DOWNSPREAD_CTRL, 2346 enable ? 
DP_MSA_TIMING_PAR_IGNORE_EN : 0) <= 0) 2347 drm_dbg_kms(&i915->drm, 2348 "Failed to %s MSA_TIMING_PAR_IGNORE in the sink\n", 2349 enabledisable(enable)); 2350 } 2351 2352 static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp, 2353 const struct intel_crtc_state *crtc_state) 2354 { 2355 struct drm_i915_private *i915 = dp_to_i915(intel_dp); 2356 2357 if (!crtc_state->fec_enable) 2358 return; 2359 2360 if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0) 2361 drm_dbg_kms(&i915->drm, 2362 "Failed to set FEC_READY in the sink\n"); 2363 } 2364 2365 static void intel_ddi_enable_fec(struct intel_encoder *encoder, 2366 const struct intel_crtc_state *crtc_state) 2367 { 2368 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2369 struct intel_dp *intel_dp; 2370 u32 val; 2371 2372 if (!crtc_state->fec_enable) 2373 return; 2374 2375 intel_dp = enc_to_intel_dp(encoder); 2376 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2377 val |= DP_TP_CTL_FEC_ENABLE; 2378 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2379 } 2380 2381 static void intel_ddi_disable_fec_state(struct intel_encoder *encoder, 2382 const struct intel_crtc_state *crtc_state) 2383 { 2384 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2385 struct intel_dp *intel_dp; 2386 u32 val; 2387 2388 if (!crtc_state->fec_enable) 2389 return; 2390 2391 intel_dp = enc_to_intel_dp(encoder); 2392 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2393 val &= ~DP_TP_CTL_FEC_ENABLE; 2394 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2395 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2396 } 2397 2398 static void intel_ddi_power_up_lanes(struct intel_encoder *encoder, 2399 const struct intel_crtc_state *crtc_state) 2400 { 2401 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 2402 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2403 enum phy phy = intel_port_to_phy(i915, encoder->port); 2404 2405 if (intel_phy_is_combo(i915, phy)) { 2406 bool lane_reversal = 2407 dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL; 2408 2409 intel_combo_phy_power_up_lanes(i915, phy, false, 2410 crtc_state->lane_count, 2411 lane_reversal); 2412 } 2413 } 2414 2415 static void intel_ddi_mso_get_config(struct intel_encoder *encoder, 2416 struct intel_crtc_state *pipe_config) 2417 { 2418 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 2419 struct drm_i915_private *i915 = to_i915(crtc->base.dev); 2420 enum pipe pipe = crtc->pipe; 2421 u32 dss1; 2422 2423 if (!HAS_MSO(i915)) 2424 return; 2425 2426 dss1 = intel_de_read(i915, ICL_PIPE_DSS_CTL1(pipe)); 2427 2428 pipe_config->splitter.enable = dss1 & SPLITTER_ENABLE; 2429 if (!pipe_config->splitter.enable) 2430 return; 2431 2432 /* Splitter enable is supported for pipe A only. 
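 * If the readout claims otherwise, warn and report the splitter as
 * disabled rather than trusting the register.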
*/ 2433 if (drm_WARN_ON(&i915->drm, pipe != PIPE_A)) { 2434 pipe_config->splitter.enable = false; 2435 return; 2436 } 2437 2438 switch (dss1 & SPLITTER_CONFIGURATION_MASK) { 2439 default: 2440 drm_WARN(&i915->drm, true, 2441 "Invalid splitter configuration, dss1=0x%08x\n", dss1); 2442 fallthrough; 2443 case SPLITTER_CONFIGURATION_2_SEGMENT: 2444 pipe_config->splitter.link_count = 2; 2445 break; 2446 case SPLITTER_CONFIGURATION_4_SEGMENT: 2447 pipe_config->splitter.link_count = 4; 2448 break; 2449 } 2450 2451 pipe_config->splitter.pixel_overlap = REG_FIELD_GET(OVERLAP_PIXELS_MASK, dss1); 2452 } 2453 2454 static void intel_ddi_mso_configure(const struct intel_crtc_state *crtc_state) 2455 { 2456 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2457 struct drm_i915_private *i915 = to_i915(crtc->base.dev); 2458 enum pipe pipe = crtc->pipe; 2459 u32 dss1 = 0; 2460 2461 if (!HAS_MSO(i915)) 2462 return; 2463 2464 if (crtc_state->splitter.enable) { 2465 /* Splitter enable is supported for pipe A only. */ 2466 if (drm_WARN_ON(&i915->drm, pipe != PIPE_A)) 2467 return; 2468 2469 dss1 |= SPLITTER_ENABLE; 2470 dss1 |= OVERLAP_PIXELS(crtc_state->splitter.pixel_overlap); 2471 if (crtc_state->splitter.link_count == 2) 2472 dss1 |= SPLITTER_CONFIGURATION_2_SEGMENT; 2473 else 2474 dss1 |= SPLITTER_CONFIGURATION_4_SEGMENT; 2475 } 2476 2477 intel_de_rmw(i915, ICL_PIPE_DSS_CTL1(pipe), 2478 SPLITTER_ENABLE | SPLITTER_CONFIGURATION_MASK | 2479 OVERLAP_PIXELS_MASK, dss1); 2480 } 2481 2482 static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state, 2483 struct intel_encoder *encoder, 2484 const struct intel_crtc_state *crtc_state, 2485 const struct drm_connector_state *conn_state) 2486 { 2487 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2488 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2489 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2490 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2491 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 2492 int level = intel_ddi_dp_level(intel_dp); 2493 2494 intel_dp_set_link_params(intel_dp, 2495 crtc_state->port_clock, 2496 crtc_state->lane_count); 2497 2498 /* 2499 * 1. Enable Power Wells 2500 * 2501 * This was handled at the beginning of intel_atomic_commit_tail(), 2502 * before we called down into this function. 2503 */ 2504 2505 /* 2. Enable Panel Power if PPS is required */ 2506 intel_pps_on(intel_dp); 2507 2508 /* 2509 * 3. For non-TBT Type-C ports, set FIA lane count 2510 * (DFLEXDPSP.DPX4TXLATC) 2511 * 2512 * This was done before tgl_ddi_pre_enable_dp by 2513 * hsw_crtc_enable()->intel_encoders_pre_pll_enable(). 2514 */ 2515 2516 /* 2517 * 4. Enable the port PLL. 2518 * 2519 * The PLL enabling itself was already done before this function by 2520 * hsw_crtc_enable()->intel_enable_shared_dpll(). We need only 2521 * configure the PLL to port mapping here. 2522 */ 2523 intel_ddi_enable_clock(encoder, crtc_state); 2524 2525 /* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */ 2526 if (!intel_phy_is_tc(dev_priv, phy) || 2527 dig_port->tc_mode != TC_PORT_TBT_ALT) { 2528 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2529 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2530 dig_port->ddi_io_power_domain); 2531 } 2532 2533 /* 6. Program DP_MODE */ 2534 icl_program_mg_dp_mode(dig_port, crtc_state); 2535 2536 /* 2537 * 7. The rest of the below are substeps under the bspec's "Enable and 2538 * Train Display Port" step. 
Note that steps that are specific to 2539 * MST will be handled by intel_mst_pre_enable_dp() before/after it 2540 * calls into this function. Also intel_mst_pre_enable_dp() only calls 2541 * us when active_mst_links==0, so any steps designated for "single 2542 * stream or multi-stream master transcoder" can just be performed 2543 * unconditionally here. 2544 */ 2545 2546 /* 2547 * 7.a Configure Transcoder Clock Select to direct the Port clock to the 2548 * Transcoder. 2549 */ 2550 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2551 2552 /* 2553 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST 2554 * Transport Select 2555 */ 2556 intel_ddi_config_transcoder_func(encoder, crtc_state); 2557 2558 /* 2559 * 7.c Configure & enable DP_TP_CTL with link training pattern 1 2560 * selected 2561 * 2562 * This will be handled by the intel_dp_start_link_train() farther 2563 * down this function. 2564 */ 2565 2566 /* 7.e Configure voltage swing and related IO settings */ 2567 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 2568 2569 /* 2570 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up 2571 * the used lanes of the DDI. 2572 */ 2573 intel_ddi_power_up_lanes(encoder, crtc_state); 2574 2575 /* 2576 * 7.g Program CoG/MSO configuration bits in DSS_CTL1 if selected. 2577 */ 2578 intel_ddi_mso_configure(crtc_state); 2579 2580 /* 2581 * 7.g Configure and enable DDI_BUF_CTL 2582 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout 2583 * after 500 us. 2584 * 2585 * We only configure what the register value will be here. Actual 2586 * enabling happens during link training farther down. 2587 */ 2588 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 2589 2590 if (!is_mst) 2591 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 2592 2593 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 2594 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true); 2595 /* 2596 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit 2597 * in the FEC_CONFIGURATION register to 1 before initiating link 2598 * training 2599 */ 2600 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 2601 2602 intel_dp_check_frl_training(intel_dp); 2603 intel_dp_pcon_dsc_configure(intel_dp, crtc_state); 2604 2605 /* 2606 * 7.i Follow DisplayPort specification training sequence (see notes for 2607 * failure handling) 2608 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle 2609 * Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent) 2610 * (timeout after 800 us) 2611 */ 2612 intel_dp_start_link_train(intel_dp, crtc_state); 2613 2614 /* 7.k Set DP_TP_CTL link training to Normal */ 2615 if (!is_trans_port_sync_mode(crtc_state)) 2616 intel_dp_stop_link_train(intel_dp, crtc_state); 2617 2618 /* 7.l Configure and enable FEC if needed */ 2619 intel_ddi_enable_fec(encoder, crtc_state); 2620 if (!crtc_state->bigjoiner) 2621 intel_dsc_enable(encoder, crtc_state); 2622 } 2623 2624 static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state, 2625 struct intel_encoder *encoder, 2626 const struct intel_crtc_state *crtc_state, 2627 const struct drm_connector_state *conn_state) 2628 { 2629 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 2630 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2631 enum port port = encoder->port; 2632 enum phy phy = intel_port_to_phy(dev_priv, port); 2633 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2634 bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST); 
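/*
 * Note: this pre-TGL path follows broadly the same sequence as the TGL
 * version above (clock, IO power, DP_MODE, vswing, lane power-up, DDI
 * buffer, link training, FEC), but the vswing programming is selected per
 * platform and the pipe clock is enabled near the end instead of up front.
 */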
2635 int level = intel_ddi_dp_level(intel_dp); 2636 2637 if (DISPLAY_VER(dev_priv) < 11) 2638 drm_WARN_ON(&dev_priv->drm, 2639 is_mst && (port == PORT_A || port == PORT_E)); 2640 else 2641 drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A); 2642 2643 intel_dp_set_link_params(intel_dp, 2644 crtc_state->port_clock, 2645 crtc_state->lane_count); 2646 2647 intel_pps_on(intel_dp); 2648 2649 intel_ddi_enable_clock(encoder, crtc_state); 2650 2651 if (!intel_phy_is_tc(dev_priv, phy) || 2652 dig_port->tc_mode != TC_PORT_TBT_ALT) { 2653 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2654 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2655 dig_port->ddi_io_power_domain); 2656 } 2657 2658 icl_program_mg_dp_mode(dig_port, crtc_state); 2659 2660 if (DISPLAY_VER(dev_priv) >= 11) 2661 icl_ddi_vswing_sequence(encoder, crtc_state, level); 2662 else if (IS_CANNONLAKE(dev_priv)) 2663 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 2664 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 2665 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 2666 else 2667 intel_prepare_dp_ddi_buffers(encoder, crtc_state); 2668 2669 intel_ddi_power_up_lanes(encoder, crtc_state); 2670 2671 intel_ddi_init_dp_buf_reg(encoder, crtc_state); 2672 if (!is_mst) 2673 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 2674 intel_dp_configure_protocol_converter(intel_dp, crtc_state); 2675 intel_dp_sink_set_decompression_state(intel_dp, crtc_state, 2676 true); 2677 intel_dp_sink_set_fec_ready(intel_dp, crtc_state); 2678 intel_dp_start_link_train(intel_dp, crtc_state); 2679 if ((port != PORT_A || DISPLAY_VER(dev_priv) >= 9) && 2680 !is_trans_port_sync_mode(crtc_state)) 2681 intel_dp_stop_link_train(intel_dp, crtc_state); 2682 2683 intel_ddi_enable_fec(encoder, crtc_state); 2684 2685 if (!is_mst) 2686 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2687 2688 if (!crtc_state->bigjoiner) 2689 intel_dsc_enable(encoder, crtc_state); 2690 } 2691 2692 static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state, 2693 struct intel_encoder *encoder, 2694 const struct intel_crtc_state *crtc_state, 2695 const struct drm_connector_state *conn_state) 2696 { 2697 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2698 2699 if (DISPLAY_VER(dev_priv) >= 12) 2700 tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 2701 else 2702 hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state); 2703 2704 /* MST will call a setting of MSA after an allocating of Virtual Channel 2705 * from MST encoder pre_enable callback. 
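 * For SST, the MSA and the M1/N1 values are programmed right here instead.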
2706 */ 2707 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 2708 intel_ddi_set_dp_msa(crtc_state, conn_state); 2709 2710 intel_dp_set_m_n(crtc_state, M1_N1); 2711 } 2712 } 2713 2714 static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state, 2715 struct intel_encoder *encoder, 2716 const struct intel_crtc_state *crtc_state, 2717 const struct drm_connector_state *conn_state) 2718 { 2719 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2720 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 2721 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2722 2723 intel_dp_dual_mode_set_tmds_output(intel_hdmi, true); 2724 intel_ddi_enable_clock(encoder, crtc_state); 2725 2726 drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref); 2727 dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv, 2728 dig_port->ddi_io_power_domain); 2729 2730 icl_program_mg_dp_mode(dig_port, crtc_state); 2731 2732 intel_ddi_enable_pipe_clock(encoder, crtc_state); 2733 2734 dig_port->set_infoframes(encoder, 2735 crtc_state->has_infoframe, 2736 crtc_state, conn_state); 2737 } 2738 2739 static void intel_ddi_pre_enable(struct intel_atomic_state *state, 2740 struct intel_encoder *encoder, 2741 const struct intel_crtc_state *crtc_state, 2742 const struct drm_connector_state *conn_state) 2743 { 2744 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 2745 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev); 2746 enum pipe pipe = crtc->pipe; 2747 2748 /* 2749 * When called from DP MST code: 2750 * - conn_state will be NULL 2751 * - encoder will be the main encoder (ie. mst->primary) 2752 * - the main connector associated with this port 2753 * won't be active or linked to a crtc 2754 * - crtc_state will be the state of the first stream to 2755 * be activated on this port, and it may not be the same 2756 * stream that will be deactivated last, but each stream 2757 * should have a state that is identical when it comes to 2758 * the DP link parameteres 2759 */ 2760 2761 drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder); 2762 2763 intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true); 2764 2765 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) { 2766 intel_ddi_pre_enable_hdmi(state, encoder, crtc_state, 2767 conn_state); 2768 } else { 2769 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2770 2771 intel_ddi_pre_enable_dp(state, encoder, crtc_state, 2772 conn_state); 2773 2774 /* FIXME precompute everything properly */ 2775 /* FIXME how do we turn infoframes off again? 
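 * (Infoframes are set up here only for an active LSPCON driving an HDMI sink.)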
*/ 2776 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 2777 dig_port->set_infoframes(encoder, 2778 crtc_state->has_infoframe, 2779 crtc_state, conn_state); 2780 } 2781 } 2782 2783 static void intel_disable_ddi_buf(struct intel_encoder *encoder, 2784 const struct intel_crtc_state *crtc_state) 2785 { 2786 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2787 enum port port = encoder->port; 2788 bool wait = false; 2789 u32 val; 2790 2791 val = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 2792 if (val & DDI_BUF_CTL_ENABLE) { 2793 val &= ~DDI_BUF_CTL_ENABLE; 2794 intel_de_write(dev_priv, DDI_BUF_CTL(port), val); 2795 wait = true; 2796 } 2797 2798 if (intel_crtc_has_dp_encoder(crtc_state)) { 2799 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 2800 val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 2801 val |= DP_TP_CTL_LINK_TRAIN_PAT1; 2802 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 2803 } 2804 2805 /* Disable FEC in DP Sink */ 2806 intel_ddi_disable_fec_state(encoder, crtc_state); 2807 2808 if (wait) 2809 intel_wait_ddi_buf_idle(dev_priv, port); 2810 } 2811 2812 static void intel_ddi_post_disable_dp(struct intel_atomic_state *state, 2813 struct intel_encoder *encoder, 2814 const struct intel_crtc_state *old_crtc_state, 2815 const struct drm_connector_state *old_conn_state) 2816 { 2817 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2818 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2819 struct intel_dp *intel_dp = &dig_port->dp; 2820 bool is_mst = intel_crtc_has_type(old_crtc_state, 2821 INTEL_OUTPUT_DP_MST); 2822 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2823 2824 if (!is_mst) 2825 intel_dp_set_infoframes(encoder, false, 2826 old_crtc_state, old_conn_state); 2827 2828 /* 2829 * Power down sink before disabling the port, otherwise we end 2830 * up getting interrupts from the sink on detecting link loss. 
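 * This is done by moving the sink to the D3 power state via the DPCD
 * SET_POWER register.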
2831 */ 2832 intel_dp_set_power(intel_dp, DP_SET_POWER_D3); 2833 2834 if (DISPLAY_VER(dev_priv) >= 12) { 2835 if (is_mst) { 2836 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder; 2837 u32 val; 2838 2839 val = intel_de_read(dev_priv, 2840 TRANS_DDI_FUNC_CTL(cpu_transcoder)); 2841 val &= ~(TGL_TRANS_DDI_PORT_MASK | 2842 TRANS_DDI_MODE_SELECT_MASK); 2843 intel_de_write(dev_priv, 2844 TRANS_DDI_FUNC_CTL(cpu_transcoder), 2845 val); 2846 } 2847 } else { 2848 if (!is_mst) 2849 intel_ddi_disable_pipe_clock(old_crtc_state); 2850 } 2851 2852 intel_disable_ddi_buf(encoder, old_crtc_state); 2853 2854 /* 2855 * From TGL spec: "If single stream or multi-stream master transcoder: 2856 * Configure Transcoder Clock select to direct no clock to the 2857 * transcoder" 2858 */ 2859 if (DISPLAY_VER(dev_priv) >= 12) 2860 intel_ddi_disable_pipe_clock(old_crtc_state); 2861 2862 intel_pps_vdd_on(intel_dp); 2863 intel_pps_off(intel_dp); 2864 2865 if (!intel_phy_is_tc(dev_priv, phy) || 2866 dig_port->tc_mode != TC_PORT_TBT_ALT) 2867 intel_display_power_put(dev_priv, 2868 dig_port->ddi_io_power_domain, 2869 fetch_and_zero(&dig_port->ddi_io_wakeref)); 2870 2871 intel_ddi_disable_clock(encoder); 2872 } 2873 2874 static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state, 2875 struct intel_encoder *encoder, 2876 const struct intel_crtc_state *old_crtc_state, 2877 const struct drm_connector_state *old_conn_state) 2878 { 2879 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2880 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2881 struct intel_hdmi *intel_hdmi = &dig_port->hdmi; 2882 2883 dig_port->set_infoframes(encoder, false, 2884 old_crtc_state, old_conn_state); 2885 2886 intel_ddi_disable_pipe_clock(old_crtc_state); 2887 2888 intel_disable_ddi_buf(encoder, old_crtc_state); 2889 2890 intel_display_power_put(dev_priv, 2891 dig_port->ddi_io_power_domain, 2892 fetch_and_zero(&dig_port->ddi_io_wakeref)); 2893 2894 intel_ddi_disable_clock(encoder); 2895 2896 intel_dp_dual_mode_set_tmds_output(intel_hdmi, false); 2897 } 2898 2899 static void intel_ddi_post_disable(struct intel_atomic_state *state, 2900 struct intel_encoder *encoder, 2901 const struct intel_crtc_state *old_crtc_state, 2902 const struct drm_connector_state *old_conn_state) 2903 { 2904 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2905 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 2906 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 2907 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 2908 2909 if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) { 2910 intel_crtc_vblank_off(old_crtc_state); 2911 2912 intel_disable_pipe(old_crtc_state); 2913 2914 intel_vrr_disable(old_crtc_state); 2915 2916 intel_ddi_disable_transcoder_func(old_crtc_state); 2917 2918 intel_dsc_disable(old_crtc_state); 2919 2920 if (DISPLAY_VER(dev_priv) >= 9) 2921 skl_scaler_disable(old_crtc_state); 2922 else 2923 ilk_pfit_disable(old_crtc_state); 2924 } 2925 2926 if (old_crtc_state->bigjoiner_linked_crtc) { 2927 struct intel_atomic_state *state = 2928 to_intel_atomic_state(old_crtc_state->uapi.state); 2929 struct intel_crtc *slave = 2930 old_crtc_state->bigjoiner_linked_crtc; 2931 const struct intel_crtc_state *old_slave_crtc_state = 2932 intel_atomic_get_old_crtc_state(state, slave); 2933 2934 intel_crtc_vblank_off(old_slave_crtc_state); 2935 2936 intel_dsc_disable(old_slave_crtc_state); 2937 skl_scaler_disable(old_slave_crtc_state); 2938 } 2939 2940 /* 2941 * When called 
from DP MST code: 2942 * - old_conn_state will be NULL 2943 * - encoder will be the main encoder (ie. mst->primary) 2944 * - the main connector associated with this port 2945 * won't be active or linked to a crtc 2946 * - old_crtc_state will be the state of the last stream to 2947 * be deactivated on this port, and it may not be the same 2948 * stream that was activated last, but each stream 2949 * should have a state that is identical when it comes to 2950 * the DP link parameteres 2951 */ 2952 2953 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 2954 intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state, 2955 old_conn_state); 2956 else 2957 intel_ddi_post_disable_dp(state, encoder, old_crtc_state, 2958 old_conn_state); 2959 2960 if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port) 2961 intel_display_power_put(dev_priv, 2962 intel_ddi_main_link_aux_domain(dig_port), 2963 fetch_and_zero(&dig_port->aux_wakeref)); 2964 2965 if (is_tc_port) 2966 intel_tc_port_put_link(dig_port); 2967 } 2968 2969 void intel_ddi_fdi_post_disable(struct intel_atomic_state *state, 2970 struct intel_encoder *encoder, 2971 const struct intel_crtc_state *old_crtc_state, 2972 const struct drm_connector_state *old_conn_state) 2973 { 2974 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 2975 u32 val; 2976 2977 /* 2978 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable) 2979 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN, 2980 * step 13 is the correct place for it. Step 18 is where it was 2981 * originally before the BUN. 2982 */ 2983 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 2984 val &= ~FDI_RX_ENABLE; 2985 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 2986 2987 intel_disable_ddi_buf(encoder, old_crtc_state); 2988 intel_ddi_disable_clock(encoder); 2989 2990 val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A)); 2991 val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK); 2992 val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2); 2993 intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val); 2994 2995 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 2996 val &= ~FDI_PCDCLK; 2997 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 2998 2999 val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A)); 3000 val &= ~FDI_RX_PLL_ENABLE; 3001 intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val); 3002 } 3003 3004 static void trans_port_sync_stop_link_train(struct intel_atomic_state *state, 3005 struct intel_encoder *encoder, 3006 const struct intel_crtc_state *crtc_state) 3007 { 3008 const struct drm_connector_state *conn_state; 3009 struct drm_connector *conn; 3010 int i; 3011 3012 if (!crtc_state->sync_mode_slaves_mask) 3013 return; 3014 3015 for_each_new_connector_in_state(&state->base, conn, conn_state, i) { 3016 struct intel_encoder *slave_encoder = 3017 to_intel_encoder(conn_state->best_encoder); 3018 struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc); 3019 const struct intel_crtc_state *slave_crtc_state; 3020 3021 if (!slave_crtc) 3022 continue; 3023 3024 slave_crtc_state = 3025 intel_atomic_get_new_crtc_state(state, slave_crtc); 3026 3027 if (slave_crtc_state->master_transcoder != 3028 crtc_state->cpu_transcoder) 3029 continue; 3030 3031 intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder), 3032 slave_crtc_state); 3033 } 3034 3035 usleep_range(200, 400); 3036 3037 intel_dp_stop_link_train(enc_to_intel_dp(encoder), 3038 crtc_state); 3039 } 3040 3041 static void intel_enable_ddi_dp(struct intel_atomic_state *state, 3042 struct 
intel_encoder *encoder, 3043 const struct intel_crtc_state *crtc_state, 3044 const struct drm_connector_state *conn_state) 3045 { 3046 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3047 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3048 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3049 enum port port = encoder->port; 3050 3051 if (port == PORT_A && DISPLAY_VER(dev_priv) < 9) 3052 intel_dp_stop_link_train(intel_dp, crtc_state); 3053 3054 intel_edp_backlight_on(crtc_state, conn_state); 3055 intel_psr_enable(intel_dp, crtc_state, conn_state); 3056 3057 if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink) 3058 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 3059 3060 intel_edp_drrs_enable(intel_dp, crtc_state); 3061 3062 if (crtc_state->has_audio) 3063 intel_audio_codec_enable(encoder, crtc_state, conn_state); 3064 3065 trans_port_sync_stop_link_train(state, encoder, crtc_state); 3066 } 3067 3068 static i915_reg_t 3069 gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv, 3070 enum port port) 3071 { 3072 static const enum transcoder trans[] = { 3073 [PORT_A] = TRANSCODER_EDP, 3074 [PORT_B] = TRANSCODER_A, 3075 [PORT_C] = TRANSCODER_B, 3076 [PORT_D] = TRANSCODER_C, 3077 [PORT_E] = TRANSCODER_A, 3078 }; 3079 3080 drm_WARN_ON(&dev_priv->drm, DISPLAY_VER(dev_priv) < 9); 3081 3082 if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E)) 3083 port = PORT_A; 3084 3085 return CHICKEN_TRANS(trans[port]); 3086 } 3087 3088 static void intel_enable_ddi_hdmi(struct intel_atomic_state *state, 3089 struct intel_encoder *encoder, 3090 const struct intel_crtc_state *crtc_state, 3091 const struct drm_connector_state *conn_state) 3092 { 3093 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3094 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3095 struct drm_connector *connector = conn_state->connector; 3096 int level = intel_ddi_hdmi_level(encoder, crtc_state); 3097 enum port port = encoder->port; 3098 3099 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 3100 crtc_state->hdmi_high_tmds_clock_ratio, 3101 crtc_state->hdmi_scrambling)) 3102 drm_dbg_kms(&dev_priv->drm, 3103 "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n", 3104 connector->base.id, connector->name); 3105 3106 if (DISPLAY_VER(dev_priv) >= 12) 3107 tgl_ddi_vswing_sequence(encoder, crtc_state, level); 3108 else if (DISPLAY_VER(dev_priv) == 11) 3109 icl_ddi_vswing_sequence(encoder, crtc_state, level); 3110 else if (IS_CANNONLAKE(dev_priv)) 3111 cnl_ddi_vswing_sequence(encoder, crtc_state, level); 3112 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3113 bxt_ddi_vswing_sequence(encoder, crtc_state, level); 3114 else 3115 intel_prepare_hdmi_ddi_buffers(encoder, level); 3116 3117 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv)) 3118 skl_ddi_set_iboost(encoder, crtc_state, level); 3119 3120 /* Display WA #1143: skl,kbl,cfl */ 3121 if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv)) { 3122 /* 3123 * For some reason these chicken bits have been 3124 * stuffed into a transcoder register, event though 3125 * the bits affect a specific DDI port rather than 3126 * a specific transcoder. 
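 * gen9_chicken_trans_reg_by_port() above provides the port to transcoder
 * mapping needed to reach the right register.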
3127 */ 3128 i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port); 3129 u32 val; 3130 3131 val = intel_de_read(dev_priv, reg); 3132 3133 if (port == PORT_E) 3134 val |= DDIE_TRAINING_OVERRIDE_ENABLE | 3135 DDIE_TRAINING_OVERRIDE_VALUE; 3136 else 3137 val |= DDI_TRAINING_OVERRIDE_ENABLE | 3138 DDI_TRAINING_OVERRIDE_VALUE; 3139 3140 intel_de_write(dev_priv, reg, val); 3141 intel_de_posting_read(dev_priv, reg); 3142 3143 udelay(1); 3144 3145 if (port == PORT_E) 3146 val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE | 3147 DDIE_TRAINING_OVERRIDE_VALUE); 3148 else 3149 val &= ~(DDI_TRAINING_OVERRIDE_ENABLE | 3150 DDI_TRAINING_OVERRIDE_VALUE); 3151 3152 intel_de_write(dev_priv, reg, val); 3153 } 3154 3155 intel_ddi_power_up_lanes(encoder, crtc_state); 3156 3157 /* In HDMI/DVI mode, the port width, and swing/emphasis values 3158 * are ignored so nothing special needs to be done besides 3159 * enabling the port. 3160 */ 3161 intel_de_write(dev_priv, DDI_BUF_CTL(port), 3162 dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE); 3163 3164 if (crtc_state->has_audio) 3165 intel_audio_codec_enable(encoder, crtc_state, conn_state); 3166 } 3167 3168 static void intel_enable_ddi(struct intel_atomic_state *state, 3169 struct intel_encoder *encoder, 3170 const struct intel_crtc_state *crtc_state, 3171 const struct drm_connector_state *conn_state) 3172 { 3173 drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder); 3174 3175 if (!crtc_state->bigjoiner_slave) 3176 intel_ddi_enable_transcoder_func(encoder, crtc_state); 3177 3178 intel_vrr_enable(encoder, crtc_state); 3179 3180 intel_enable_pipe(crtc_state); 3181 3182 intel_crtc_vblank_on(crtc_state); 3183 3184 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) 3185 intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state); 3186 else 3187 intel_enable_ddi_dp(state, encoder, crtc_state, conn_state); 3188 3189 /* Enable hdcp if it's desired */ 3190 if (conn_state->content_protection == 3191 DRM_MODE_CONTENT_PROTECTION_DESIRED) 3192 intel_hdcp_enable(to_intel_connector(conn_state->connector), 3193 crtc_state, 3194 (u8)conn_state->hdcp_content_type); 3195 } 3196 3197 static void intel_disable_ddi_dp(struct intel_atomic_state *state, 3198 struct intel_encoder *encoder, 3199 const struct intel_crtc_state *old_crtc_state, 3200 const struct drm_connector_state *old_conn_state) 3201 { 3202 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3203 3204 intel_dp->link_trained = false; 3205 3206 if (old_crtc_state->has_audio) 3207 intel_audio_codec_disable(encoder, 3208 old_crtc_state, old_conn_state); 3209 3210 intel_edp_drrs_disable(intel_dp, old_crtc_state); 3211 intel_psr_disable(intel_dp, old_crtc_state); 3212 intel_edp_backlight_off(old_conn_state); 3213 /* Disable the decompression in DP Sink */ 3214 intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state, 3215 false); 3216 /* Disable Ignore_MSA bit in DP Sink */ 3217 intel_dp_sink_set_msa_timing_par_ignore_state(intel_dp, old_crtc_state, 3218 false); 3219 } 3220 3221 static void intel_disable_ddi_hdmi(struct intel_atomic_state *state, 3222 struct intel_encoder *encoder, 3223 const struct intel_crtc_state *old_crtc_state, 3224 const struct drm_connector_state *old_conn_state) 3225 { 3226 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3227 struct drm_connector *connector = old_conn_state->connector; 3228 3229 if (old_crtc_state->has_audio) 3230 intel_audio_codec_disable(encoder, 3231 old_crtc_state, old_conn_state); 3232 3233 if (!intel_hdmi_handle_sink_scrambling(encoder, connector, 3234 
false, false)) 3235 drm_dbg_kms(&i915->drm, 3236 "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n", 3237 connector->base.id, connector->name); 3238 } 3239 3240 static void intel_disable_ddi(struct intel_atomic_state *state, 3241 struct intel_encoder *encoder, 3242 const struct intel_crtc_state *old_crtc_state, 3243 const struct drm_connector_state *old_conn_state) 3244 { 3245 intel_hdcp_disable(to_intel_connector(old_conn_state->connector)); 3246 3247 if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI)) 3248 intel_disable_ddi_hdmi(state, encoder, old_crtc_state, 3249 old_conn_state); 3250 else 3251 intel_disable_ddi_dp(state, encoder, old_crtc_state, 3252 old_conn_state); 3253 } 3254 3255 static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state, 3256 struct intel_encoder *encoder, 3257 const struct intel_crtc_state *crtc_state, 3258 const struct drm_connector_state *conn_state) 3259 { 3260 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 3261 3262 intel_ddi_set_dp_msa(crtc_state, conn_state); 3263 3264 intel_psr_update(intel_dp, crtc_state, conn_state); 3265 intel_dp_set_infoframes(encoder, true, crtc_state, conn_state); 3266 intel_edp_drrs_update(intel_dp, crtc_state); 3267 3268 intel_panel_update_backlight(state, encoder, crtc_state, conn_state); 3269 } 3270 3271 void intel_ddi_update_pipe(struct intel_atomic_state *state, 3272 struct intel_encoder *encoder, 3273 const struct intel_crtc_state *crtc_state, 3274 const struct drm_connector_state *conn_state) 3275 { 3276 3277 if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) && 3278 !intel_encoder_is_mst(encoder)) 3279 intel_ddi_update_pipe_dp(state, encoder, crtc_state, 3280 conn_state); 3281 3282 intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state); 3283 } 3284 3285 static void 3286 intel_ddi_update_prepare(struct intel_atomic_state *state, 3287 struct intel_encoder *encoder, 3288 struct intel_crtc *crtc) 3289 { 3290 struct intel_crtc_state *crtc_state = 3291 crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL; 3292 int required_lanes = crtc_state ? 
crtc_state->lane_count : 1; 3293 3294 drm_WARN_ON(state->base.dev, crtc && crtc->active); 3295 3296 intel_tc_port_get_link(enc_to_dig_port(encoder), 3297 required_lanes); 3298 if (crtc_state && crtc_state->hw.active) 3299 intel_update_active_dpll(state, crtc, encoder); 3300 } 3301 3302 static void 3303 intel_ddi_update_complete(struct intel_atomic_state *state, 3304 struct intel_encoder *encoder, 3305 struct intel_crtc *crtc) 3306 { 3307 intel_tc_port_put_link(enc_to_dig_port(encoder)); 3308 } 3309 3310 static void 3311 intel_ddi_pre_pll_enable(struct intel_atomic_state *state, 3312 struct intel_encoder *encoder, 3313 const struct intel_crtc_state *crtc_state, 3314 const struct drm_connector_state *conn_state) 3315 { 3316 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3317 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3318 enum phy phy = intel_port_to_phy(dev_priv, encoder->port); 3319 bool is_tc_port = intel_phy_is_tc(dev_priv, phy); 3320 3321 if (is_tc_port) 3322 intel_tc_port_get_link(dig_port, crtc_state->lane_count); 3323 3324 if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) { 3325 drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref); 3326 dig_port->aux_wakeref = 3327 intel_display_power_get(dev_priv, 3328 intel_ddi_main_link_aux_domain(dig_port)); 3329 } 3330 3331 if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT) 3332 /* 3333 * Program the lane count for static/dynamic connections on 3334 * Type-C ports. Skip this step for TBT. 3335 */ 3336 intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count); 3337 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3338 bxt_ddi_phy_set_lane_optim_mask(encoder, 3339 crtc_state->lane_lat_optim_mask); 3340 } 3341 3342 static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp, 3343 const struct intel_crtc_state *crtc_state) 3344 { 3345 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3346 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3347 enum port port = encoder->port; 3348 u32 dp_tp_ctl, ddi_buf_ctl; 3349 bool wait = false; 3350 3351 dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3352 3353 if (dp_tp_ctl & DP_TP_CTL_ENABLE) { 3354 ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port)); 3355 if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) { 3356 intel_de_write(dev_priv, DDI_BUF_CTL(port), 3357 ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE); 3358 wait = true; 3359 } 3360 3361 dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK); 3362 dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1; 3363 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 3364 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3365 3366 if (wait) 3367 intel_wait_ddi_buf_idle(dev_priv, port); 3368 } 3369 3370 dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1; 3371 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) { 3372 dp_tp_ctl |= DP_TP_CTL_MODE_MST; 3373 } else { 3374 dp_tp_ctl |= DP_TP_CTL_MODE_SST; 3375 if (drm_dp_enhanced_frame_cap(intel_dp->dpcd)) 3376 dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE; 3377 } 3378 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl); 3379 intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3380 3381 intel_dp->DP |= DDI_BUF_CTL_ENABLE; 3382 intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP); 3383 intel_de_posting_read(dev_priv, DDI_BUF_CTL(port)); 3384 3385 intel_wait_ddi_buf_active(dev_priv, port); 3386 } 3387 3388 static void 
intel_ddi_set_link_train(struct intel_dp *intel_dp, 3389 const struct intel_crtc_state *crtc_state, 3390 u8 dp_train_pat) 3391 { 3392 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3393 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3394 u32 temp; 3395 3396 temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3397 3398 temp &= ~DP_TP_CTL_LINK_TRAIN_MASK; 3399 switch (intel_dp_training_pattern_symbol(dp_train_pat)) { 3400 case DP_TRAINING_PATTERN_DISABLE: 3401 temp |= DP_TP_CTL_LINK_TRAIN_NORMAL; 3402 break; 3403 case DP_TRAINING_PATTERN_1: 3404 temp |= DP_TP_CTL_LINK_TRAIN_PAT1; 3405 break; 3406 case DP_TRAINING_PATTERN_2: 3407 temp |= DP_TP_CTL_LINK_TRAIN_PAT2; 3408 break; 3409 case DP_TRAINING_PATTERN_3: 3410 temp |= DP_TP_CTL_LINK_TRAIN_PAT3; 3411 break; 3412 case DP_TRAINING_PATTERN_4: 3413 temp |= DP_TP_CTL_LINK_TRAIN_PAT4; 3414 break; 3415 } 3416 3417 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp); 3418 } 3419 3420 static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp, 3421 const struct intel_crtc_state *crtc_state) 3422 { 3423 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 3424 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3425 enum port port = encoder->port; 3426 u32 val; 3427 3428 val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state)); 3429 val &= ~DP_TP_CTL_LINK_TRAIN_MASK; 3430 val |= DP_TP_CTL_LINK_TRAIN_IDLE; 3431 intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val); 3432 3433 /* 3434 * Before TGL, PORT_A can only carry eDP in SST mode. There the only 3435 * reason we need to set idle transmission mode is to work around a HW 3436 * issue where we enable the pipe while not in idle link-training mode. 3437 * In that case there is a requirement to wait for a minimum number of 3438 * idle patterns to be sent.
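 * Outside of that special case the DP_TP_STATUS Idle Done flag is polled
 * below to confirm the idle patterns have actually been sent.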
3439 */ 3440 if (port == PORT_A && DISPLAY_VER(dev_priv) < 12) 3441 return; 3442 3443 if (intel_de_wait_for_set(dev_priv, 3444 dp_tp_status_reg(encoder, crtc_state), 3445 DP_TP_STATUS_IDLE_DONE, 1)) 3446 drm_err(&dev_priv->drm, 3447 "Timed out waiting for DP idle patterns\n"); 3448 } 3449 3450 static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv, 3451 enum transcoder cpu_transcoder) 3452 { 3453 if (cpu_transcoder == TRANSCODER_EDP) 3454 return false; 3455 3456 if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO)) 3457 return false; 3458 3459 return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) & 3460 AUDIO_OUTPUT_ENABLE(cpu_transcoder); 3461 } 3462 3463 void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv, 3464 struct intel_crtc_state *crtc_state) 3465 { 3466 if (DISPLAY_VER(dev_priv) >= 12 && crtc_state->port_clock > 594000) 3467 crtc_state->min_voltage_level = 2; 3468 else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000) 3469 crtc_state->min_voltage_level = 3; 3470 else if (DISPLAY_VER(dev_priv) >= 11 && crtc_state->port_clock > 594000) 3471 crtc_state->min_voltage_level = 1; 3472 else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000) 3473 crtc_state->min_voltage_level = 2; 3474 } 3475 3476 static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv, 3477 enum transcoder cpu_transcoder) 3478 { 3479 u32 master_select; 3480 3481 if (DISPLAY_VER(dev_priv) >= 11) { 3482 u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder)); 3483 3484 if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0) 3485 return INVALID_TRANSCODER; 3486 3487 master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2); 3488 } else { 3489 u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3490 3491 if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0) 3492 return INVALID_TRANSCODER; 3493 3494 master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl); 3495 } 3496 3497 if (master_select == 0) 3498 return TRANSCODER_EDP; 3499 else 3500 return master_select - 1; 3501 } 3502 3503 static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state) 3504 { 3505 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev); 3506 u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) | 3507 BIT(TRANSCODER_C) | BIT(TRANSCODER_D); 3508 enum transcoder cpu_transcoder; 3509 3510 crtc_state->master_transcoder = 3511 bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder); 3512 3513 for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) { 3514 enum intel_display_power_domain power_domain; 3515 intel_wakeref_t trans_wakeref; 3516 3517 power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder); 3518 trans_wakeref = intel_display_power_get_if_enabled(dev_priv, 3519 power_domain); 3520 3521 if (!trans_wakeref) 3522 continue; 3523 3524 if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) == 3525 crtc_state->cpu_transcoder) 3526 crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder); 3527 3528 intel_display_power_put(dev_priv, power_domain, trans_wakeref); 3529 } 3530 3531 drm_WARN_ON(&dev_priv->drm, 3532 crtc_state->master_transcoder != INVALID_TRANSCODER && 3533 crtc_state->sync_mode_slaves_mask); 3534 } 3535 3536 static void intel_ddi_read_func_ctl(struct intel_encoder *encoder, 3537 struct intel_crtc_state *pipe_config) 3538 { 3539 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3540 struct intel_crtc *intel_crtc = 
to_intel_crtc(pipe_config->uapi.crtc); 3541 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 3542 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 3543 u32 temp, flags = 0; 3544 3545 temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder)); 3546 if (temp & TRANS_DDI_PHSYNC) 3547 flags |= DRM_MODE_FLAG_PHSYNC; 3548 else 3549 flags |= DRM_MODE_FLAG_NHSYNC; 3550 if (temp & TRANS_DDI_PVSYNC) 3551 flags |= DRM_MODE_FLAG_PVSYNC; 3552 else 3553 flags |= DRM_MODE_FLAG_NVSYNC; 3554 3555 pipe_config->hw.adjusted_mode.flags |= flags; 3556 3557 switch (temp & TRANS_DDI_BPC_MASK) { 3558 case TRANS_DDI_BPC_6: 3559 pipe_config->pipe_bpp = 18; 3560 break; 3561 case TRANS_DDI_BPC_8: 3562 pipe_config->pipe_bpp = 24; 3563 break; 3564 case TRANS_DDI_BPC_10: 3565 pipe_config->pipe_bpp = 30; 3566 break; 3567 case TRANS_DDI_BPC_12: 3568 pipe_config->pipe_bpp = 36; 3569 break; 3570 default: 3571 break; 3572 } 3573 3574 switch (temp & TRANS_DDI_MODE_SELECT_MASK) { 3575 case TRANS_DDI_MODE_SELECT_HDMI: 3576 pipe_config->has_hdmi_sink = true; 3577 3578 pipe_config->infoframes.enable |= 3579 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3580 3581 if (pipe_config->infoframes.enable) 3582 pipe_config->has_infoframe = true; 3583 3584 if (temp & TRANS_DDI_HDMI_SCRAMBLING) 3585 pipe_config->hdmi_scrambling = true; 3586 if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE) 3587 pipe_config->hdmi_high_tmds_clock_ratio = true; 3588 fallthrough; 3589 case TRANS_DDI_MODE_SELECT_DVI: 3590 pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI); 3591 pipe_config->lane_count = 4; 3592 break; 3593 case TRANS_DDI_MODE_SELECT_FDI: 3594 pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG); 3595 break; 3596 case TRANS_DDI_MODE_SELECT_DP_SST: 3597 if (encoder->type == INTEL_OUTPUT_EDP) 3598 pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP); 3599 else 3600 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP); 3601 pipe_config->lane_count = 3602 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 3603 intel_dp_get_m_n(intel_crtc, pipe_config); 3604 3605 if (DISPLAY_VER(dev_priv) >= 11) { 3606 i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config); 3607 3608 pipe_config->fec_enable = 3609 intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE; 3610 3611 drm_dbg_kms(&dev_priv->drm, 3612 "[ENCODER:%d:%s] Fec status: %u\n", 3613 encoder->base.base.id, encoder->base.name, 3614 pipe_config->fec_enable); 3615 } 3616 3617 if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink) 3618 pipe_config->infoframes.enable |= 3619 intel_lspcon_infoframes_enabled(encoder, pipe_config); 3620 else 3621 pipe_config->infoframes.enable |= 3622 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3623 break; 3624 case TRANS_DDI_MODE_SELECT_DP_MST: 3625 pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST); 3626 pipe_config->lane_count = 3627 ((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1; 3628 3629 if (DISPLAY_VER(dev_priv) >= 12) 3630 pipe_config->mst_master_transcoder = 3631 REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp); 3632 3633 intel_dp_get_m_n(intel_crtc, pipe_config); 3634 3635 pipe_config->infoframes.enable |= 3636 intel_hdmi_infoframes_enabled(encoder, pipe_config); 3637 break; 3638 default: 3639 break; 3640 } 3641 } 3642 3643 static void intel_ddi_get_config(struct intel_encoder *encoder, 3644 struct intel_crtc_state *pipe_config) 3645 { 3646 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3647 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder; 3648 3649 /* 
XXX: DSI transcoder paranoia */ 3650 if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder))) 3651 return; 3652 3653 if (pipe_config->bigjoiner_slave) { 3654 /* read out pipe settings from master */ 3655 enum transcoder save = pipe_config->cpu_transcoder; 3656 3657 /* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */ 3658 WARN_ON(pipe_config->output_types); 3659 pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe; 3660 intel_ddi_read_func_ctl(encoder, pipe_config); 3661 pipe_config->cpu_transcoder = save; 3662 } else { 3663 intel_ddi_read_func_ctl(encoder, pipe_config); 3664 } 3665 3666 intel_ddi_mso_get_config(encoder, pipe_config); 3667 3668 pipe_config->has_audio = 3669 intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder); 3670 3671 if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp && 3672 pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) { 3673 /* 3674 * This is a big fat ugly hack. 3675 * 3676 * Some machines in UEFI boot mode provide us a VBT that has 18 3677 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons 3678 * unknown we fail to light up. Yet the same BIOS boots up with 3679 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as 3680 * max, not what it tells us to use. 3681 * 3682 * Note: This will still be broken if the eDP panel is not lit 3683 * up by the BIOS, and thus we can't get the mode at module 3684 * load. 3685 */ 3686 drm_dbg_kms(&dev_priv->drm, 3687 "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n", 3688 pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp); 3689 dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp; 3690 } 3691 3692 if (!pipe_config->bigjoiner_slave) 3693 ddi_dotclock_get(pipe_config); 3694 3695 if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3696 pipe_config->lane_lat_optim_mask = 3697 bxt_ddi_phy_get_lane_lat_optim_mask(encoder); 3698 3699 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 3700 3701 intel_hdmi_read_gcp_infoframe(encoder, pipe_config); 3702 3703 intel_read_infoframe(encoder, pipe_config, 3704 HDMI_INFOFRAME_TYPE_AVI, 3705 &pipe_config->infoframes.avi); 3706 intel_read_infoframe(encoder, pipe_config, 3707 HDMI_INFOFRAME_TYPE_SPD, 3708 &pipe_config->infoframes.spd); 3709 intel_read_infoframe(encoder, pipe_config, 3710 HDMI_INFOFRAME_TYPE_VENDOR, 3711 &pipe_config->infoframes.hdmi); 3712 intel_read_infoframe(encoder, pipe_config, 3713 HDMI_INFOFRAME_TYPE_DRM, 3714 &pipe_config->infoframes.drm); 3715 3716 if (DISPLAY_VER(dev_priv) >= 8) 3717 bdw_get_trans_port_sync_config(pipe_config); 3718 3719 intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA); 3720 intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC); 3721 3722 intel_psr_get_config(encoder, pipe_config); 3723 } 3724 3725 void intel_ddi_get_clock(struct intel_encoder *encoder, 3726 struct intel_crtc_state *crtc_state, 3727 struct intel_shared_dpll *pll) 3728 { 3729 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3730 enum icl_port_dpll_id port_dpll_id = ICL_PORT_DPLL_DEFAULT; 3731 struct icl_port_dpll *port_dpll = &crtc_state->icl_port_dplls[port_dpll_id]; 3732 bool pll_active; 3733 3734 if (drm_WARN_ON(&i915->drm, !pll)) 3735 return; 3736 3737 port_dpll->pll = pll; 3738 pll_active = intel_dpll_get_hw_state(i915, pll, &port_dpll->hw_state); 3739 drm_WARN_ON(&i915->drm, !pll_active); 3740 3741 icl_set_active_port_dpll(crtc_state, port_dpll_id); 3742 3743 crtc_state->port_clock = intel_dpll_get_freq(i915, 
crtc_state->shared_dpll, 3744 &crtc_state->dpll_hw_state); 3745 } 3746 3747 static void adls_ddi_get_config(struct intel_encoder *encoder, 3748 struct intel_crtc_state *crtc_state) 3749 { 3750 intel_ddi_get_clock(encoder, crtc_state, adls_ddi_get_pll(encoder)); 3751 intel_ddi_get_config(encoder, crtc_state); 3752 } 3753 3754 static void rkl_ddi_get_config(struct intel_encoder *encoder, 3755 struct intel_crtc_state *crtc_state) 3756 { 3757 intel_ddi_get_clock(encoder, crtc_state, rkl_ddi_get_pll(encoder)); 3758 intel_ddi_get_config(encoder, crtc_state); 3759 } 3760 3761 static void dg1_ddi_get_config(struct intel_encoder *encoder, 3762 struct intel_crtc_state *crtc_state) 3763 { 3764 intel_ddi_get_clock(encoder, crtc_state, dg1_ddi_get_pll(encoder)); 3765 intel_ddi_get_config(encoder, crtc_state); 3766 } 3767 3768 static void icl_ddi_combo_get_config(struct intel_encoder *encoder, 3769 struct intel_crtc_state *crtc_state) 3770 { 3771 intel_ddi_get_clock(encoder, crtc_state, icl_ddi_combo_get_pll(encoder)); 3772 intel_ddi_get_config(encoder, crtc_state); 3773 } 3774 3775 static void icl_ddi_tc_get_clock(struct intel_encoder *encoder, 3776 struct intel_crtc_state *crtc_state, 3777 struct intel_shared_dpll *pll) 3778 { 3779 struct drm_i915_private *i915 = to_i915(encoder->base.dev); 3780 enum icl_port_dpll_id port_dpll_id; 3781 struct icl_port_dpll *port_dpll; 3782 bool pll_active; 3783 3784 if (drm_WARN_ON(&i915->drm, !pll)) 3785 return; 3786 3787 if (intel_get_shared_dpll_id(i915, pll) == DPLL_ID_ICL_TBTPLL) 3788 port_dpll_id = ICL_PORT_DPLL_DEFAULT; 3789 else 3790 port_dpll_id = ICL_PORT_DPLL_MG_PHY; 3791 3792 port_dpll = &crtc_state->icl_port_dplls[port_dpll_id]; 3793 3794 port_dpll->pll = pll; 3795 pll_active = intel_dpll_get_hw_state(i915, pll, &port_dpll->hw_state); 3796 drm_WARN_ON(&i915->drm, !pll_active); 3797 3798 icl_set_active_port_dpll(crtc_state, port_dpll_id); 3799 3800 if (intel_get_shared_dpll_id(i915, crtc_state->shared_dpll) == DPLL_ID_ICL_TBTPLL) 3801 crtc_state->port_clock = icl_calc_tbt_pll_link(i915, encoder->port); 3802 else 3803 crtc_state->port_clock = intel_dpll_get_freq(i915, crtc_state->shared_dpll, 3804 &crtc_state->dpll_hw_state); 3805 } 3806 3807 static void icl_ddi_tc_get_config(struct intel_encoder *encoder, 3808 struct intel_crtc_state *crtc_state) 3809 { 3810 icl_ddi_tc_get_clock(encoder, crtc_state, icl_ddi_tc_get_pll(encoder)); 3811 intel_ddi_get_config(encoder, crtc_state); 3812 } 3813 3814 static void cnl_ddi_get_config(struct intel_encoder *encoder, 3815 struct intel_crtc_state *crtc_state) 3816 { 3817 intel_ddi_get_clock(encoder, crtc_state, cnl_ddi_get_pll(encoder)); 3818 intel_ddi_get_config(encoder, crtc_state); 3819 } 3820 3821 static void bxt_ddi_get_config(struct intel_encoder *encoder, 3822 struct intel_crtc_state *crtc_state) 3823 { 3824 intel_ddi_get_clock(encoder, crtc_state, bxt_ddi_get_pll(encoder)); 3825 intel_ddi_get_config(encoder, crtc_state); 3826 } 3827 3828 static void skl_ddi_get_config(struct intel_encoder *encoder, 3829 struct intel_crtc_state *crtc_state) 3830 { 3831 intel_ddi_get_clock(encoder, crtc_state, skl_ddi_get_pll(encoder)); 3832 intel_ddi_get_config(encoder, crtc_state); 3833 } 3834 3835 void hsw_ddi_get_config(struct intel_encoder *encoder, 3836 struct intel_crtc_state *crtc_state) 3837 { 3838 intel_ddi_get_clock(encoder, crtc_state, hsw_ddi_get_pll(encoder)); 3839 intel_ddi_get_config(encoder, crtc_state); 3840 } 3841 3842 static void intel_ddi_sync_state(struct intel_encoder *encoder, 3843 const struct 
intel_crtc_state *crtc_state) 3844 { 3845 if (intel_crtc_has_dp_encoder(crtc_state)) 3846 intel_dp_sync_state(encoder, crtc_state); 3847 } 3848 3849 static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder, 3850 struct intel_crtc_state *crtc_state) 3851 { 3852 if (intel_crtc_has_dp_encoder(crtc_state)) 3853 return intel_dp_initial_fastset_check(encoder, crtc_state); 3854 3855 return true; 3856 } 3857 3858 static enum intel_output_type 3859 intel_ddi_compute_output_type(struct intel_encoder *encoder, 3860 struct intel_crtc_state *crtc_state, 3861 struct drm_connector_state *conn_state) 3862 { 3863 switch (conn_state->connector->connector_type) { 3864 case DRM_MODE_CONNECTOR_HDMIA: 3865 return INTEL_OUTPUT_HDMI; 3866 case DRM_MODE_CONNECTOR_eDP: 3867 return INTEL_OUTPUT_EDP; 3868 case DRM_MODE_CONNECTOR_DisplayPort: 3869 return INTEL_OUTPUT_DP; 3870 default: 3871 MISSING_CASE(conn_state->connector->connector_type); 3872 return INTEL_OUTPUT_UNUSED; 3873 } 3874 } 3875 3876 static int intel_ddi_compute_config(struct intel_encoder *encoder, 3877 struct intel_crtc_state *pipe_config, 3878 struct drm_connector_state *conn_state) 3879 { 3880 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 3881 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 3882 enum port port = encoder->port; 3883 int ret; 3884 3885 if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) 3886 pipe_config->cpu_transcoder = TRANSCODER_EDP; 3887 3888 if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) { 3889 ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state); 3890 } else { 3891 ret = intel_dp_compute_config(encoder, pipe_config, conn_state); 3892 } 3893 3894 if (ret) 3895 return ret; 3896 3897 if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A && 3898 pipe_config->cpu_transcoder == TRANSCODER_EDP) 3899 pipe_config->pch_pfit.force_thru = 3900 pipe_config->pch_pfit.enabled || 3901 pipe_config->crc_enabled; 3902 3903 if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 3904 pipe_config->lane_lat_optim_mask = 3905 bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 3906 3907 intel_ddi_compute_min_voltage_level(dev_priv, pipe_config); 3908 3909 return 0; 3910 } 3911 3912 static bool mode_equal(const struct drm_display_mode *mode1, 3913 const struct drm_display_mode *mode2) 3914 { 3915 return drm_mode_match(mode1, mode2, 3916 DRM_MODE_MATCH_TIMINGS | 3917 DRM_MODE_MATCH_FLAGS | 3918 DRM_MODE_MATCH_3D_FLAGS) && 3919 mode1->clock == mode2->clock; /* we want an exact match */ 3920 } 3921 3922 static bool m_n_equal(const struct intel_link_m_n *m_n_1, 3923 const struct intel_link_m_n *m_n_2) 3924 { 3925 return m_n_1->tu == m_n_2->tu && 3926 m_n_1->gmch_m == m_n_2->gmch_m && 3927 m_n_1->gmch_n == m_n_2->gmch_n && 3928 m_n_1->link_m == m_n_2->link_m && 3929 m_n_1->link_n == m_n_2->link_n; 3930 } 3931 3932 static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1, 3933 const struct intel_crtc_state *crtc_state2) 3934 { 3935 return crtc_state1->hw.active && crtc_state2->hw.active && 3936 crtc_state1->output_types == crtc_state2->output_types && 3937 crtc_state1->output_format == crtc_state2->output_format && 3938 crtc_state1->lane_count == crtc_state2->lane_count && 3939 crtc_state1->port_clock == crtc_state2->port_clock && 3940 mode_equal(&crtc_state1->hw.adjusted_mode, 3941 &crtc_state2->hw.adjusted_mode) && 3942 m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n); 3943 } 3944 3945 static u8 3946 intel_ddi_port_sync_transcoders(const 
				struct intel_crtc_state *ref_crtc_state,
				int tile_group_id)
{
	struct drm_connector *connector;
	const struct drm_connector_state *conn_state;
	struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(ref_crtc_state->uapi.state);
	u8 transcoders = 0;
	int i;

	/*
	 * We don't enable port sync on BDW due to missing w/as and
	 * due to not having adjusted the modeset sequence appropriately.
	 */
	if (DISPLAY_VER(dev_priv) < 9)
		return 0;

	if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP))
		return 0;

	for_each_new_connector_in_state(&state->base, connector, conn_state, i) {
		struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *crtc_state;

		if (!crtc)
			continue;

		if (!connector->has_tile ||
		    connector->tile_group->id !=
		    tile_group_id)
			continue;
		crtc_state = intel_atomic_get_new_crtc_state(state,
							     crtc);
		if (!crtcs_port_sync_compatible(ref_crtc_state,
						crtc_state))
			continue;
		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

static int intel_ddi_compute_config_late(struct intel_encoder *encoder,
					 struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = conn_state->connector;
	u8 port_sync_transcoders = 0;

	drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]",
		    encoder->base.base.id, encoder->base.name,
		    crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name);

	if (connector->has_tile)
		port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state,
									connector->tile_group->id);

	/*
	 * EDP transcoders cannot be enslaved, so always make them a master
	 * when present.
	 */
	if (port_sync_transcoders & BIT(TRANSCODER_EDP))
		crtc_state->master_transcoder = TRANSCODER_EDP;
	else
		crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1;

	if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) {
		crtc_state->master_transcoder = INVALID_TRANSCODER;
		crtc_state->sync_mode_slaves_mask =
			port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder);
	}

	return 0;
}

static void intel_ddi_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));

	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	if (dig_port)
		kfree(dig_port->hdcp_port_data.streams);
	kfree(dig_port);
}

static void intel_ddi_encoder_reset(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(encoder));

	intel_dp->reset_link_params = true;

	intel_pps_encoder_reset(intel_dp);
}

static const struct drm_encoder_funcs intel_ddi_funcs = {
	.reset = intel_ddi_encoder_reset,
	.destroy = intel_ddi_encoder_destroy,
};

static struct intel_connector *
intel_ddi_init_dp_connector(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector =
intel_connector_alloc(); 4057 if (!connector) 4058 return NULL; 4059 4060 dig_port->dp.output_reg = DDI_BUF_CTL(port); 4061 dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain; 4062 dig_port->dp.set_link_train = intel_ddi_set_link_train; 4063 dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train; 4064 4065 if (DISPLAY_VER(dev_priv) >= 12) 4066 dig_port->dp.set_signal_levels = tgl_set_signal_levels; 4067 else if (DISPLAY_VER(dev_priv) >= 11) 4068 dig_port->dp.set_signal_levels = icl_set_signal_levels; 4069 else if (IS_CANNONLAKE(dev_priv)) 4070 dig_port->dp.set_signal_levels = cnl_set_signal_levels; 4071 else if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv)) 4072 dig_port->dp.set_signal_levels = bxt_set_signal_levels; 4073 else 4074 dig_port->dp.set_signal_levels = hsw_set_signal_levels; 4075 4076 dig_port->dp.voltage_max = intel_ddi_dp_voltage_max; 4077 dig_port->dp.preemph_max = intel_ddi_dp_preemph_max; 4078 4079 if (!intel_dp_init_connector(dig_port, connector)) { 4080 kfree(connector); 4081 return NULL; 4082 } 4083 4084 return connector; 4085 } 4086 4087 static int modeset_pipe(struct drm_crtc *crtc, 4088 struct drm_modeset_acquire_ctx *ctx) 4089 { 4090 struct drm_atomic_state *state; 4091 struct drm_crtc_state *crtc_state; 4092 int ret; 4093 4094 state = drm_atomic_state_alloc(crtc->dev); 4095 if (!state) 4096 return -ENOMEM; 4097 4098 state->acquire_ctx = ctx; 4099 4100 crtc_state = drm_atomic_get_crtc_state(state, crtc); 4101 if (IS_ERR(crtc_state)) { 4102 ret = PTR_ERR(crtc_state); 4103 goto out; 4104 } 4105 4106 crtc_state->connectors_changed = true; 4107 4108 ret = drm_atomic_commit(state); 4109 out: 4110 drm_atomic_state_put(state); 4111 4112 return ret; 4113 } 4114 4115 static int intel_hdmi_reset_link(struct intel_encoder *encoder, 4116 struct drm_modeset_acquire_ctx *ctx) 4117 { 4118 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev); 4119 struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder); 4120 struct intel_connector *connector = hdmi->attached_connector; 4121 struct i2c_adapter *adapter = 4122 intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus); 4123 struct drm_connector_state *conn_state; 4124 struct intel_crtc_state *crtc_state; 4125 struct intel_crtc *crtc; 4126 u8 config; 4127 int ret; 4128 4129 if (!connector || connector->base.status != connector_status_connected) 4130 return 0; 4131 4132 ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex, 4133 ctx); 4134 if (ret) 4135 return ret; 4136 4137 conn_state = connector->base.state; 4138 4139 crtc = to_intel_crtc(conn_state->crtc); 4140 if (!crtc) 4141 return 0; 4142 4143 ret = drm_modeset_lock(&crtc->base.mutex, ctx); 4144 if (ret) 4145 return ret; 4146 4147 crtc_state = to_intel_crtc_state(crtc->base.state); 4148 4149 drm_WARN_ON(&dev_priv->drm, 4150 !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)); 4151 4152 if (!crtc_state->hw.active) 4153 return 0; 4154 4155 if (!crtc_state->hdmi_high_tmds_clock_ratio && 4156 !crtc_state->hdmi_scrambling) 4157 return 0; 4158 4159 if (conn_state->commit && 4160 !try_wait_for_completion(&conn_state->commit->hw_done)) 4161 return 0; 4162 4163 ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config); 4164 if (ret < 0) { 4165 drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n", 4166 ret); 4167 return 0; 4168 } 4169 4170 if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) == 4171 crtc_state->hdmi_high_tmds_clock_ratio && 4172 !!(config & SCDC_SCRAMBLING_ENABLE) == 4173 crtc_state->hdmi_scrambling) 4174 return 0; 4175 4176 /* 4177 
	 * HDMI 2.0 says that one should not send scrambled data
	 * prior to configuring the sink scrambling, and that
	 * TMDS clock/data transmission should be suspended when
	 * changing the TMDS clock rate in the sink. So let's
	 * just do a full modeset here, even though some sinks
	 * would be perfectly happy if we were to just reconfigure
	 * the SCDC settings on the fly.
	 */
	return modeset_pipe(&crtc->base, ctx);
}

static enum intel_hotplug_state
intel_ddi_hotplug(struct intel_encoder *encoder,
		  struct intel_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum phy phy = intel_port_to_phy(i915, encoder->port);
	bool is_tc = intel_phy_is_tc(i915, phy);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA)
			ret = intel_hdmi_reset_link(encoder, &ctx);
		else
			ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Unpowered type-c dongles can take some time to boot and become
	 * responsive, so give those dongles some time to power up and then
	 * retry the probe.
	 *
	 * On many platforms the HDMI live state signal is known to be
	 * unreliable, so we can't use it to detect if a sink is connected or
	 * not. Instead we detect if it's connected based on whether we can
	 * read the EDID or not. That in turn has a problem during disconnect,
	 * since the HPD interrupt may be raised before the DDC lines get
	 * disconnected (due to how the required lengths of the DDC vs. HPD
	 * connector pins are specified) and so we'll still be able to get a
	 * valid EDID. To solve this, schedule another detection cycle if this
	 * time around we didn't detect any change in the sink's connection
	 * status.
	 *
	 * Type-c connectors which get their HPD signal deasserted then
	 * reasserted, without unplugging/replugging the sink from the
	 * connector, introduce a delay until the AUX channel communication
	 * becomes functional. Retry the detection for 5 seconds on type-c
	 * connectors to account for this delay.
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED &&
	    connector->hotplug_retries < (is_tc ? 5 : 1) &&
	    !dig_port->dp.is_mst)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

static bool lpt_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool hsw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static bool bdw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit;
}

static struct intel_connector *
intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port)
{
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port);
	intel_hdmi_init_connector(dig_port, connector);

	return connector;
}

static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);

	if (dig_port->base.port != PORT_A)
		return false;

	if (dig_port->saved_port_bits & DDI_A_4_LANES)
		return false;

	/* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only
	 * supported configuration
	 */
	if (IS_GEMINILAKE(dev_priv) || IS_BROXTON(dev_priv))
		return true;

	/* Cannonlake: Most SKUs don't support DDI_E, and the only one that
	 * does also has a full A/E split called DDI_F, which makes DDI_E
	 * useless. However, in this case let's trust the VBT info.
	 */
	if (IS_CANNONLAKE(dev_priv) &&
	    !intel_bios_is_port_present(dev_priv, PORT_E))
		return true;

	return false;
}

static int
intel_ddi_max_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;
	int max_lanes = 4;

	if (DISPLAY_VER(dev_priv) >= 11)
		return max_lanes;

	if (port == PORT_A || port == PORT_E) {
		if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
			max_lanes = port == PORT_A ? 4 : 0;
		else
			/* Both A and E share 2 lanes */
			max_lanes = 2;
	}

	/*
	 * Some BIOS might fail to set this bit on port A if eDP
	 * wasn't lit up at boot. Force this bit set when needed
	 * so we use the proper lane count for our calculations.
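	 * intel_ddi_a_force_4_lanes() encodes the cases where this override
	 * is needed.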
4351 */ 4352 if (intel_ddi_a_force_4_lanes(dig_port)) { 4353 drm_dbg_kms(&dev_priv->drm, 4354 "Forcing DDI_A_4_LANES for port A\n"); 4355 dig_port->saved_port_bits |= DDI_A_4_LANES; 4356 max_lanes = 4; 4357 } 4358 4359 return max_lanes; 4360 } 4361 4362 static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy) 4363 { 4364 return i915->hti_state & HDPORT_ENABLED && 4365 i915->hti_state & HDPORT_DDI_USED(phy); 4366 } 4367 4368 static enum hpd_pin xelpd_hpd_pin(struct drm_i915_private *dev_priv, 4369 enum port port) 4370 { 4371 if (port >= PORT_D_XELPD) 4372 return HPD_PORT_D + port - PORT_D_XELPD; 4373 else if (port >= PORT_TC1) 4374 return HPD_PORT_TC1 + port - PORT_TC1; 4375 else 4376 return HPD_PORT_A + port - PORT_A; 4377 } 4378 4379 static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv, 4380 enum port port) 4381 { 4382 if (port >= PORT_TC1) 4383 return HPD_PORT_C + port - PORT_TC1; 4384 else 4385 return HPD_PORT_A + port - PORT_A; 4386 } 4387 4388 static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv, 4389 enum port port) 4390 { 4391 if (port >= PORT_TC1) 4392 return HPD_PORT_TC1 + port - PORT_TC1; 4393 else 4394 return HPD_PORT_A + port - PORT_A; 4395 } 4396 4397 static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv, 4398 enum port port) 4399 { 4400 if (HAS_PCH_TGP(dev_priv)) 4401 return tgl_hpd_pin(dev_priv, port); 4402 4403 if (port >= PORT_TC1) 4404 return HPD_PORT_C + port - PORT_TC1; 4405 else 4406 return HPD_PORT_A + port - PORT_A; 4407 } 4408 4409 static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv, 4410 enum port port) 4411 { 4412 if (port >= PORT_C) 4413 return HPD_PORT_TC1 + port - PORT_C; 4414 else 4415 return HPD_PORT_A + port - PORT_A; 4416 } 4417 4418 static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv, 4419 enum port port) 4420 { 4421 if (port == PORT_D) 4422 return HPD_PORT_A; 4423 4424 if (HAS_PCH_MCC(dev_priv)) 4425 return icl_hpd_pin(dev_priv, port); 4426 4427 return HPD_PORT_A + port - PORT_A; 4428 } 4429 4430 static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv, 4431 enum port port) 4432 { 4433 if (port == PORT_F) 4434 return HPD_PORT_E; 4435 4436 return HPD_PORT_A + port - PORT_A; 4437 } 4438 4439 static enum hpd_pin skl_hpd_pin(struct drm_i915_private *dev_priv, enum port port) 4440 { 4441 if (HAS_PCH_TGP(dev_priv)) 4442 return icl_hpd_pin(dev_priv, port); 4443 4444 return HPD_PORT_A + port - PORT_A; 4445 } 4446 4447 static bool intel_ddi_is_tc(struct drm_i915_private *i915, enum port port) 4448 { 4449 if (DISPLAY_VER(i915) >= 12) 4450 return port >= PORT_TC1; 4451 else if (DISPLAY_VER(i915) >= 11) 4452 return port >= PORT_C; 4453 else 4454 return false; 4455 } 4456 4457 #define port_tc_name(port) ((port) - PORT_TC1 + '1') 4458 #define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1') 4459 4460 void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port) 4461 { 4462 struct intel_digital_port *dig_port; 4463 struct intel_encoder *encoder; 4464 const struct intel_bios_encoder_data *devdata; 4465 bool init_hdmi, init_dp; 4466 enum phy phy = intel_port_to_phy(dev_priv, port); 4467 4468 /* 4469 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may 4470 * have taken over some of the PHYs and made them unavailable to the 4471 * driver. In that case we should skip initializing the corresponding 4472 * outputs. 
4473 */ 4474 if (hti_uses_phy(dev_priv, phy)) { 4475 drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n", 4476 port_name(port), phy_name(phy)); 4477 return; 4478 } 4479 4480 devdata = intel_bios_encoder_data_lookup(dev_priv, port); 4481 if (!devdata) { 4482 drm_dbg_kms(&dev_priv->drm, 4483 "VBT says port %c is not present\n", 4484 port_name(port)); 4485 return; 4486 } 4487 4488 init_hdmi = intel_bios_encoder_supports_dvi(devdata) || 4489 intel_bios_encoder_supports_hdmi(devdata); 4490 init_dp = intel_bios_encoder_supports_dp(devdata); 4491 4492 if (intel_bios_is_lspcon_present(dev_priv, port)) { 4493 /* 4494 * Lspcon device needs to be driven with DP connector 4495 * with special detection sequence. So make sure DP 4496 * is initialized before lspcon. 4497 */ 4498 init_dp = true; 4499 init_hdmi = false; 4500 drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n", 4501 port_name(port)); 4502 } 4503 4504 if (!init_dp && !init_hdmi) { 4505 drm_dbg_kms(&dev_priv->drm, 4506 "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n", 4507 port_name(port)); 4508 return; 4509 } 4510 4511 dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL); 4512 if (!dig_port) 4513 return; 4514 4515 encoder = &dig_port->base; 4516 encoder->devdata = devdata; 4517 4518 if (DISPLAY_VER(dev_priv) >= 13 && port >= PORT_D_XELPD) { 4519 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4520 DRM_MODE_ENCODER_TMDS, 4521 "DDI %c/PHY %c", 4522 port_name(port - PORT_D_XELPD + PORT_D), 4523 phy_name(phy)); 4524 } else if (DISPLAY_VER(dev_priv) >= 12) { 4525 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 4526 4527 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4528 DRM_MODE_ENCODER_TMDS, 4529 "DDI %s%c/PHY %s%c", 4530 port >= PORT_TC1 ? "TC" : "", 4531 port >= PORT_TC1 ? port_tc_name(port) : port_name(port), 4532 tc_port != TC_PORT_NONE ? "TC" : "", 4533 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy)); 4534 } else if (DISPLAY_VER(dev_priv) >= 11) { 4535 enum tc_port tc_port = intel_port_to_tc(dev_priv, port); 4536 4537 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4538 DRM_MODE_ENCODER_TMDS, 4539 "DDI %c%s/PHY %s%c", 4540 port_name(port), 4541 port >= PORT_C ? " (TC)" : "", 4542 tc_port != TC_PORT_NONE ? "TC" : "", 4543 tc_port != TC_PORT_NONE ? 
tc_port_name(tc_port) : phy_name(phy)); 4544 } else { 4545 drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs, 4546 DRM_MODE_ENCODER_TMDS, 4547 "DDI %c/PHY %c", port_name(port), phy_name(phy)); 4548 } 4549 4550 mutex_init(&dig_port->hdcp_mutex); 4551 dig_port->num_hdcp_streams = 0; 4552 4553 encoder->hotplug = intel_ddi_hotplug; 4554 encoder->compute_output_type = intel_ddi_compute_output_type; 4555 encoder->compute_config = intel_ddi_compute_config; 4556 encoder->compute_config_late = intel_ddi_compute_config_late; 4557 encoder->enable = intel_enable_ddi; 4558 encoder->pre_pll_enable = intel_ddi_pre_pll_enable; 4559 encoder->pre_enable = intel_ddi_pre_enable; 4560 encoder->disable = intel_disable_ddi; 4561 encoder->post_disable = intel_ddi_post_disable; 4562 encoder->update_pipe = intel_ddi_update_pipe; 4563 encoder->get_hw_state = intel_ddi_get_hw_state; 4564 encoder->sync_state = intel_ddi_sync_state; 4565 encoder->initial_fastset_check = intel_ddi_initial_fastset_check; 4566 encoder->suspend = intel_dp_encoder_suspend; 4567 encoder->shutdown = intel_dp_encoder_shutdown; 4568 encoder->get_power_domains = intel_ddi_get_power_domains; 4569 4570 encoder->type = INTEL_OUTPUT_DDI; 4571 encoder->power_domain = intel_port_to_power_domain(port); 4572 encoder->port = port; 4573 encoder->cloneable = 0; 4574 encoder->pipe_mask = ~0; 4575 4576 if (IS_ALDERLAKE_S(dev_priv)) { 4577 encoder->enable_clock = adls_ddi_enable_clock; 4578 encoder->disable_clock = adls_ddi_disable_clock; 4579 encoder->is_clock_enabled = adls_ddi_is_clock_enabled; 4580 encoder->get_config = adls_ddi_get_config; 4581 } else if (IS_ROCKETLAKE(dev_priv)) { 4582 encoder->enable_clock = rkl_ddi_enable_clock; 4583 encoder->disable_clock = rkl_ddi_disable_clock; 4584 encoder->is_clock_enabled = rkl_ddi_is_clock_enabled; 4585 encoder->get_config = rkl_ddi_get_config; 4586 } else if (IS_DG1(dev_priv)) { 4587 encoder->enable_clock = dg1_ddi_enable_clock; 4588 encoder->disable_clock = dg1_ddi_disable_clock; 4589 encoder->is_clock_enabled = dg1_ddi_is_clock_enabled; 4590 encoder->get_config = dg1_ddi_get_config; 4591 } else if (IS_JSL_EHL(dev_priv)) { 4592 if (intel_ddi_is_tc(dev_priv, port)) { 4593 encoder->enable_clock = jsl_ddi_tc_enable_clock; 4594 encoder->disable_clock = jsl_ddi_tc_disable_clock; 4595 encoder->is_clock_enabled = jsl_ddi_tc_is_clock_enabled; 4596 encoder->get_config = icl_ddi_combo_get_config; 4597 } else { 4598 encoder->enable_clock = icl_ddi_combo_enable_clock; 4599 encoder->disable_clock = icl_ddi_combo_disable_clock; 4600 encoder->is_clock_enabled = icl_ddi_combo_is_clock_enabled; 4601 encoder->get_config = icl_ddi_combo_get_config; 4602 } 4603 } else if (DISPLAY_VER(dev_priv) >= 11) { 4604 if (intel_ddi_is_tc(dev_priv, port)) { 4605 encoder->enable_clock = icl_ddi_tc_enable_clock; 4606 encoder->disable_clock = icl_ddi_tc_disable_clock; 4607 encoder->is_clock_enabled = icl_ddi_tc_is_clock_enabled; 4608 encoder->get_config = icl_ddi_tc_get_config; 4609 } else { 4610 encoder->enable_clock = icl_ddi_combo_enable_clock; 4611 encoder->disable_clock = icl_ddi_combo_disable_clock; 4612 encoder->is_clock_enabled = icl_ddi_combo_is_clock_enabled; 4613 encoder->get_config = icl_ddi_combo_get_config; 4614 } 4615 } else if (IS_CANNONLAKE(dev_priv)) { 4616 encoder->enable_clock = cnl_ddi_enable_clock; 4617 encoder->disable_clock = cnl_ddi_disable_clock; 4618 encoder->is_clock_enabled = cnl_ddi_is_clock_enabled; 4619 encoder->get_config = cnl_ddi_get_config; 4620 } else if (IS_GEMINILAKE(dev_priv) || 
IS_BROXTON(dev_priv)) { 4621 /* BXT/GLK have fixed PLL->port mapping */ 4622 encoder->get_config = bxt_ddi_get_config; 4623 } else if (DISPLAY_VER(dev_priv) == 9) { 4624 encoder->enable_clock = skl_ddi_enable_clock; 4625 encoder->disable_clock = skl_ddi_disable_clock; 4626 encoder->is_clock_enabled = skl_ddi_is_clock_enabled; 4627 encoder->get_config = skl_ddi_get_config; 4628 } else if (IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv)) { 4629 encoder->enable_clock = hsw_ddi_enable_clock; 4630 encoder->disable_clock = hsw_ddi_disable_clock; 4631 encoder->is_clock_enabled = hsw_ddi_is_clock_enabled; 4632 encoder->get_config = hsw_ddi_get_config; 4633 } 4634 4635 if (DISPLAY_VER(dev_priv) >= 13) 4636 encoder->hpd_pin = xelpd_hpd_pin(dev_priv, port); 4637 else if (IS_DG1(dev_priv)) 4638 encoder->hpd_pin = dg1_hpd_pin(dev_priv, port); 4639 else if (IS_ROCKETLAKE(dev_priv)) 4640 encoder->hpd_pin = rkl_hpd_pin(dev_priv, port); 4641 else if (DISPLAY_VER(dev_priv) >= 12) 4642 encoder->hpd_pin = tgl_hpd_pin(dev_priv, port); 4643 else if (IS_JSL_EHL(dev_priv)) 4644 encoder->hpd_pin = ehl_hpd_pin(dev_priv, port); 4645 else if (DISPLAY_VER(dev_priv) == 11) 4646 encoder->hpd_pin = icl_hpd_pin(dev_priv, port); 4647 else if (IS_CANNONLAKE(dev_priv)) 4648 encoder->hpd_pin = cnl_hpd_pin(dev_priv, port); 4649 else if (DISPLAY_VER(dev_priv) == 9 && !IS_BROXTON(dev_priv)) 4650 encoder->hpd_pin = skl_hpd_pin(dev_priv, port); 4651 else 4652 encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port); 4653 4654 if (DISPLAY_VER(dev_priv) >= 11) 4655 dig_port->saved_port_bits = 4656 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 4657 & DDI_BUF_PORT_REVERSAL; 4658 else 4659 dig_port->saved_port_bits = 4660 intel_de_read(dev_priv, DDI_BUF_CTL(port)) 4661 & (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES); 4662 4663 if (intel_bios_is_lane_reversal_needed(dev_priv, port)) 4664 dig_port->saved_port_bits |= DDI_BUF_PORT_REVERSAL; 4665 4666 dig_port->dp.output_reg = INVALID_MMIO_REG; 4667 dig_port->max_lanes = intel_ddi_max_lanes(dig_port); 4668 dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port); 4669 4670 if (intel_phy_is_tc(dev_priv, phy)) { 4671 bool is_legacy = 4672 !intel_bios_encoder_supports_typec_usb(devdata) && 4673 !intel_bios_encoder_supports_tbt(devdata); 4674 4675 intel_tc_port_init(dig_port, is_legacy); 4676 4677 encoder->update_prepare = intel_ddi_update_prepare; 4678 encoder->update_complete = intel_ddi_update_complete; 4679 } 4680 4681 drm_WARN_ON(&dev_priv->drm, port > PORT_I); 4682 dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO + 4683 port - PORT_A; 4684 4685 if (init_dp) { 4686 if (!intel_ddi_init_dp_connector(dig_port)) 4687 goto err; 4688 4689 dig_port->hpd_pulse = intel_dp_hpd_pulse; 4690 4691 /* Splitter enable for eDP MSO is supported for pipe A only. */ 4692 if (dig_port->dp.mso_link_count) 4693 encoder->pipe_mask = BIT(PIPE_A); 4694 } 4695 4696 /* In theory we don't need the encoder->type check, but leave it just in 4697 * case we have some really bad VBTs... 
 */
	if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) {
		if (!intel_ddi_init_hdmi_connector(dig_port))
			goto err;
	}

	if (DISPLAY_VER(dev_priv) >= 11) {
		if (intel_phy_is_tc(dev_priv, phy))
			dig_port->connected = intel_tc_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else if (DISPLAY_VER(dev_priv) >= 8) {
		if (port == PORT_A || IS_GEMINILAKE(dev_priv) ||
		    IS_BROXTON(dev_priv))
			dig_port->connected = bdw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = hsw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	}

	intel_infoframe_init(dig_port);

	return;

err:
	drm_encoder_cleanup(&encoder->base);
	kfree(dig_port);
}
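
/*
 * Illustrative usage sketch (not part of this file): intel_ddi_init() above
 * is expected to be called once per DDI port while the display outputs are
 * being set up, for example:
 *
 *	intel_ddi_init(dev_priv, PORT_A);
 *	intel_ddi_init(dev_priv, PORT_B);
 *
 * The exact set of ports probed is platform specific and lives in the caller;
 * the ports named here are only an assumption for illustration.
 */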