1 /* 2 * Copyright © 2008 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 
22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 */ 27 28 #include <linux/export.h> 29 #include <linux/i2c.h> 30 #include <linux/iopoll.h> 31 #include <linux/log2.h> 32 #include <linux/math.h> 33 #include <linux/notifier.h> 34 #include <linux/seq_buf.h> 35 #include <linux/slab.h> 36 #include <linux/sort.h> 37 #include <linux/string_helpers.h> 38 #include <linux/timekeeping.h> 39 #include <linux/types.h> 40 #include <asm/byteorder.h> 41 42 #include <drm/display/drm_dp_helper.h> 43 #include <drm/display/drm_dp_tunnel.h> 44 #include <drm/display/drm_dsc_helper.h> 45 #include <drm/display/drm_hdmi_helper.h> 46 #include <drm/drm_atomic_helper.h> 47 #include <drm/drm_crtc.h> 48 #include <drm/drm_edid.h> 49 #include <drm/drm_fixed.h> 50 #include <drm/drm_print.h> 51 #include <drm/drm_probe_helper.h> 52 53 #include "g4x_dp.h" 54 #include "intel_alpm.h" 55 #include "intel_atomic.h" 56 #include "intel_audio.h" 57 #include "intel_backlight.h" 58 #include "intel_combo_phy_regs.h" 59 #include "intel_connector.h" 60 #include "intel_crtc.h" 61 #include "intel_crtc_state_dump.h" 62 #include "intel_cx0_phy.h" 63 #include "intel_ddi.h" 64 #include "intel_de.h" 65 #include "intel_display_driver.h" 66 #include "intel_display_jiffies.h" 67 #include "intel_display_utils.h" 68 #include "intel_display_regs.h" 69 #include "intel_display_rpm.h" 70 #include "intel_display_types.h" 71 #include "intel_dp.h" 72 #include "intel_dp_aux.h" 73 #include "intel_dp_hdcp.h" 74 #include "intel_dp_link_training.h" 75 #include "intel_dp_mst.h" 76 #include "intel_dp_test.h" 77 #include "intel_dp_tunnel.h" 78 #include "intel_dpio_phy.h" 79 #include "intel_dpll.h" 80 #include "intel_drrs.h" 81 #include "intel_encoder.h" 82 #include "intel_fifo_underrun.h" 83 #include "intel_hdcp.h" 84 #include "intel_hdmi.h" 85 #include "intel_hotplug.h" 86 #include "intel_hotplug_irq.h" 87 #include "intel_lspcon.h" 88 #include "intel_lvds.h" 89 #include "intel_modeset_lock.h" 90 #include "intel_panel.h" 91 
#include "intel_pch_display.h"
#include "intel_pfit.h"
#include "intel_pps.h"
#include "intel_psr.h"
#include "intel_quirks.h"
#include "intel_tc.h"
#include "intel_vblank.h"
#include "intel_vdsc.h"
#include "intel_vrr.h"

/* Max DSC line buffer depth supported by HW. */
#define INTEL_DP_DSC_MAX_LINE_BUF_DEPTH		13

/* DP DSC FEC Overhead factor in ppm = 1/(0.972261) = 1.028530 */
#define DP_DSC_FEC_OVERHEAD_FACTOR		1028530

/* Constants for DP DSC configurations */
static const u8 valid_dsc_bpp[] = {6, 8, 10, 12, 15};

/*
 * With Single pipe configuration, HW is capable of supporting maximum of:
 * 2 slices per line for ICL, BMG
 * 4 slices per line for other platforms.
 * For now consider a max of 2 slices per line, which works for all platforms.
 * With this we can have max of 4 DSC Slices per pipe.
 *
 * For higher resolutions where 12 slice support is required with
 * ultrajoiner, only then each pipe can support 3 slices.
 *
 * #TODO Split this better to use 4 slices/dsc engine where supported.
 */
static const u8 valid_dsc_slicecount[] = {1, 2, 3, 4};

/**
 * intel_dp_is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 *
 * This function is not safe to use prior to encoder type being set.
 */
bool intel_dp_is_edp(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

	return dig_port->base.type == INTEL_OUTPUT_EDP;
}

/* Defined later in this file; forward-declared for use by earlier helpers. */
static void intel_dp_unset_edid(struct intel_dp *intel_dp);

/* Is link rate UHBR and thus 128b/132b?
 */
bool intel_dp_is_uhbr(const struct intel_crtc_state *crtc_state)
{
	return drm_dp_is_uhbr_rate(crtc_state->port_clock);
}

/**
 * intel_dp_link_symbol_size - get the link symbol size for a given link rate
 * @rate: link rate in 10kbit/s units
 *
 * Returns the link symbol size in bits/symbol units depending on the link
 * rate -> channel coding.
 */
int intel_dp_link_symbol_size(int rate)
{
	return drm_dp_is_uhbr_rate(rate) ? 32 : 10;
}

/**
 * intel_dp_link_symbol_clock - convert link rate to link symbol clock
 * @rate: link rate in 10kbit/s units
 *
 * Returns the link symbol clock frequency in kHz units depending on the
 * link rate and channel coding.
 */
int intel_dp_link_symbol_clock(int rate)
{
	return DIV_ROUND_CLOSEST(rate * 10, intel_dp_link_symbol_size(rate));
}

/*
 * Highest link rate supported by the DPRX: taken from the DP tunnel when BW
 * allocation is enabled, otherwise from the DP_MAX_LINK_RATE DPCD register.
 */
static int max_dprx_rate(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int max_rate;

	if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
		max_rate = drm_dp_tunnel_max_dprx_rate(intel_dp->tunnel);
	else
		max_rate = drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);

	/*
	 * Some platforms + eDP panels may not reliably support HBR3
	 * due to signal integrity limitations, despite advertising it.
	 * Cap the link rate to HBR2 to avoid unstable configurations for the
	 * known machines.
	 */
	if (intel_dp_is_edp(intel_dp) && intel_has_quirk(display, QUIRK_EDP_LIMIT_RATE_HBR2))
		max_rate = min(max_rate, 540000);

	return max_rate;
}

/*
 * Maximum lane count supported by the DPRX: from the DP tunnel when BW
 * allocation is enabled, otherwise from the DPCD receiver caps.
 */
static int max_dprx_lane_count(struct intel_dp *intel_dp)
{
	if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
		return drm_dp_tunnel_max_dprx_lane_count(intel_dp->tunnel);

	return drm_dp_max_lane_count(intel_dp->dpcd);
}

/* Fall back to an RBR (1.62 Gbps) only rate list. */
static void intel_dp_set_default_sink_rates(struct intel_dp *intel_dp)
{
	intel_dp->sink_rates[0] = 162000;
	intel_dp->num_sink_rates = 1;
}

/* update sink rates from dpcd */
static void intel_dp_set_dpcd_sink_rates(struct intel_dp *intel_dp)
{
	static const int dp_rates[] = {
		162000, 270000, 540000, 810000
	};
	int i, max_rate;
	int max_lttpr_rate;

	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
		/* Needed, e.g., for Apple MBP 2017, 15 inch eDP Retina panel */
		static const int quirk_rates[] = { 162000, 270000, 324000 };

		memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
		intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);

		return;
	}

	/*
	 * Sink rates for 8b/10b.
	 */
	max_rate = max_dprx_rate(intel_dp);
	max_lttpr_rate = drm_dp_lttpr_max_link_rate(intel_dp->lttpr_common_caps);
	if (max_lttpr_rate)
		max_rate = min(max_rate, max_lttpr_rate);

	for (i = 0; i < ARRAY_SIZE(dp_rates); i++) {
		if (dp_rates[i] > max_rate)
			break;
		intel_dp->sink_rates[i] = dp_rates[i];
	}

	/*
	 * Sink rates for 128b/132b. If set, sink should support all 8b/10b
	 * rates and 10 Gbps.
	 */
	if (drm_dp_128b132b_supported(intel_dp->dpcd)) {
		u8 uhbr_rates = 0;

		/* Room in sink_rates[] for the 3 possible UHBR rates. */
		BUILD_BUG_ON(ARRAY_SIZE(intel_dp->sink_rates) < ARRAY_SIZE(dp_rates) + 3);

		drm_dp_dpcd_readb(&intel_dp->aux,
				  DP_128B132B_SUPPORTED_LINK_RATES, &uhbr_rates);

		if (drm_dp_lttpr_count(intel_dp->lttpr_common_caps)) {
			/* We have a repeater */
			if (intel_dp->lttpr_common_caps[0] >= 0x20 &&
			    intel_dp->lttpr_common_caps[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
							DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] &
			    DP_PHY_REPEATER_128B132B_SUPPORTED) {
				/* Repeater supports 128b/132b, valid UHBR rates */
				uhbr_rates &= intel_dp->lttpr_common_caps[DP_PHY_REPEATER_128B132B_RATES -
									  DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
			} else {
				/* Does not support 128b/132b */
				uhbr_rates = 0;
			}
		}

		if (uhbr_rates & DP_UHBR10)
			intel_dp->sink_rates[i++] = 1000000;
		if (uhbr_rates & DP_UHBR13_5)
			intel_dp->sink_rates[i++] = 1350000;
		if (uhbr_rates & DP_UHBR20)
			intel_dp->sink_rates[i++] = 2000000;
	}

	intel_dp->num_sink_rates = i;
}

/*
 * Populate sink_rates[] from the DPCD, falling back to the default RBR-only
 * list (with an error logged) if the DPCD advertised no usable rate.
 */
static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;

	intel_dp_set_dpcd_sink_rates(intel_dp);

	if (intel_dp->num_sink_rates)
		return;

	drm_err(display->drm,
		"[CONNECTOR:%d:%s][ENCODER:%d:%s] Invalid DPCD with no link rates, using defaults\n",
		connector->base.base.id, connector->base.name,
		encoder->base.base.id, encoder->base.name);

	intel_dp_set_default_sink_rates(intel_dp);
}

/* Fall back to a single lane. */
static void intel_dp_set_default_max_sink_lane_count(struct intel_dp *intel_dp)
{
	intel_dp->max_sink_lane_count = 1;
}

/*
 * Read the max sink lane count from the DPRX, validating it against the lane
 * counts allowed by the DP spec (1/2/4) and falling back to 1 lane otherwise.
 */
static
void intel_dp_set_max_sink_lane_count(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;

	intel_dp->max_sink_lane_count = max_dprx_lane_count(intel_dp);

	switch (intel_dp->max_sink_lane_count) {
	case 1:
	case 2:
	case 4:
		return;
	}

	drm_err(display->drm,
		"[CONNECTOR:%d:%s][ENCODER:%d:%s] Invalid DPCD max lane count (%d), using default\n",
		connector->base.base.id, connector->base.name,
		encoder->base.base.id, encoder->base.name,
		intel_dp->max_sink_lane_count);

	intel_dp_set_default_max_sink_lane_count(intel_dp);
}

/* Get length of rates array potentially limited by max_rate. */
static int intel_dp_rate_limit_len(const int *rates, int len, int max_rate)
{
	int i;

	/* Limit results by potentially reduced max rate */
	for (i = 0; i < len; i++) {
		/* rates[] is ascending: scan from the top for the first rate <= max */
		if (rates[len - i - 1] <= max_rate)
			return len - i;
	}

	return 0;
}

/* Get length of common rates array potentially limited by max_rate.
 */
static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
					  int max_rate)
{
	return intel_dp_rate_limit_len(intel_dp->common_rates,
				       intel_dp->num_common_rates, max_rate);
}

/* Return the common (source x sink) rate at @index, or RBR on a bogus index. */
int intel_dp_common_rate(struct intel_dp *intel_dp, int index)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (drm_WARN_ON(display->drm,
			index < 0 || index >= intel_dp->num_common_rates))
		return 162000;

	return intel_dp->common_rates[index];
}

/* Theoretical max between source and sink */
int intel_dp_max_common_rate(struct intel_dp *intel_dp)
{
	/* common_rates[] is sorted ascending, so the last entry is the max */
	return intel_dp_common_rate(intel_dp, intel_dp->num_common_rates - 1);
}

/* Max lane count of the source port, capped by the VBT if it specifies one. */
int intel_dp_max_source_lane_count(struct intel_digital_port *dig_port)
{
	int vbt_max_lanes = intel_bios_dp_max_lane_count(dig_port->base.devdata);
	int max_lanes = dig_port->max_lanes;

	if (vbt_max_lanes)
		max_lanes = min(max_lanes, vbt_max_lanes);

	return max_lanes;
}

/* Theoretical max between source and sink */
int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	int source_max = intel_dp_max_source_lane_count(dig_port);
	int sink_max = intel_dp->max_sink_lane_count;
	int lane_max = intel_tc_port_max_lane_count(dig_port);
	int lttpr_max = drm_dp_lttpr_max_lane_count(intel_dp->lttpr_common_caps);

	/* An LTTPR in the link may support fewer lanes than the sink itself */
	if (lttpr_max)
		sink_max = min(sink_max, lttpr_max);

	return min3(source_max, sink_max, lane_max);
}

/* Debugfs-forced lane count, clamped to what source+sink actually support. */
static int forced_lane_count(struct intel_dp *intel_dp)
{
	return clamp(intel_dp->link.force_lane_count, 1, intel_dp_max_common_lane_count(intel_dp));
}

int intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int lane_count;

	if (intel_dp->link.force_lane_count)
		lane_count = forced_lane_count(intel_dp);
	else
		lane_count = intel_dp->link.max_lane_count;

	switch (lane_count) {
	case 1:
	case 2:
	case 4:
		return lane_count;
	default:
		MISSING_CASE(lane_count);
		return 1;
	}
}

/* Min lane count: the forced value when one is set, otherwise 1. */
static int intel_dp_min_lane_count(struct intel_dp *intel_dp)
{
	if (intel_dp->link.force_lane_count)
		return forced_lane_count(intel_dp);

	return 1;
}

/*
 * Compute the total BW allocation overhead (in 1ppm units) for the given link
 * parameters, deriving the UHBR/DSC flags from @link_clock/@dsc_slice_count.
 */
int intel_dp_link_bw_overhead(int link_clock, int lane_count, int hdisplay,
			      int dsc_slice_count, int bpp_x16, unsigned long flags)
{
	int overhead;

	WARN_ON(flags & ~(DRM_DP_BW_OVERHEAD_MST | DRM_DP_BW_OVERHEAD_SSC_REF_CLK |
			  DRM_DP_BW_OVERHEAD_FEC));

	if (drm_dp_is_uhbr_rate(link_clock))
		flags |= DRM_DP_BW_OVERHEAD_UHBR;

	if (dsc_slice_count)
		flags |= DRM_DP_BW_OVERHEAD_DSC;

	overhead = drm_dp_bw_overhead(lane_count, hdisplay,
				      dsc_slice_count,
				      bpp_x16,
				      flags);

	/*
	 * TODO: clarify whether a minimum required by the fixed FEC overhead
	 * in the bspec audio programming sequence is required here.
	 */
	return max(overhead, intel_dp_bw_fec_overhead(flags & DRM_DP_BW_OVERHEAD_FEC));
}

/*
 * The required data bandwidth for a mode with given pixel clock and bpp. This
 * is the required net bandwidth independent of the data bandwidth efficiency.
 */
int intel_dp_link_required(int link_clock, int lane_count,
			   int mode_clock, int mode_hdisplay,
			   int link_bpp_x16, unsigned long bw_overhead_flags)
{
	/* dsc_slice_count=0: the overhead here is computed for a non-DSC link */
	int bw_overhead = intel_dp_link_bw_overhead(link_clock, lane_count, mode_hdisplay,
						    0, link_bpp_x16, bw_overhead_flags);

	return intel_dp_effective_data_rate(mode_clock, link_bpp_x16, bw_overhead);
}

/**
 * intel_dp_effective_data_rate - Return the pixel data rate accounting for BW allocation overhead
 * @pixel_clock: pixel clock in kHz
 * @bpp_x16: bits per pixel .4 fixed point format
 * @bw_overhead: BW allocation overhead in 1ppm units
 *
 * Return the effective pixel data rate in kB/sec units taking into account
 * the provided SSC, FEC, DSC BW allocation overhead.
 */
int intel_dp_effective_data_rate(int pixel_clock, int bpp_x16,
				 int bw_overhead)
{
	/*
	 * NOTE(review): pixel_clock * bpp_x16 is evaluated in int before being
	 * widened by mul_u32_u32() - presumably the callers' ranges keep this
	 * below INT_MAX; confirm for very high pixel clock / bpp combinations.
	 */
	return DIV_ROUND_UP_ULL(mul_u32_u32(pixel_clock * bpp_x16, bw_overhead),
				1000000 * 16 * 8);
}

/**
 * intel_dp_max_link_data_rate: Calculate the maximum rate for the given link params
 * @intel_dp: Intel DP object
 * @max_dprx_rate: Maximum data rate of the DPRX
 * @max_dprx_lanes: Maximum lane count of the DPRX
 *
 * Calculate the maximum data rate for the provided link parameters taking into
 * account any BW limitations by a DP tunnel attached to @intel_dp.
 *
 * Returns the maximum data rate in kBps units.
 */
int intel_dp_max_link_data_rate(struct intel_dp *intel_dp,
				int max_dprx_rate, int max_dprx_lanes)
{
	int max_rate = drm_dp_max_dprx_data_rate(max_dprx_rate, max_dprx_lanes);

	if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
		max_rate = min(max_rate,
			       drm_dp_tunnel_available_bw(intel_dp->tunnel));

	return max_rate;
}

/* Can this port drive a joined (big/ultra joiner) mode? */
bool intel_dp_has_joiner(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;

	/* eDP MSO is not compatible with joiner */
	if (intel_dp->mso_link_count)
		return false;

	return DISPLAY_VER(display) >= 12 ||
		(DISPLAY_VER(display) == 11 &&
		 encoder->port != PORT_A);
}

/* Per-platform source rate caps, in 10kbit/s units (HBR3=810000, UHBR10=1000000, ...). */
static int dg2_max_source_rate(struct intel_dp *intel_dp)
{
	return intel_dp_is_edp(intel_dp) ? 810000 : 1350000;
}

static int icl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	if (intel_encoder_is_combo(encoder) && !intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}

static int ehl_max_source_rate(struct intel_dp *intel_dp)
{
	if (intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}

static int mtl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	if (intel_encoder_is_c10phy(encoder) ||
	    display->platform.pantherlake_wildcatlake)
		return 810000;

	if (DISPLAY_VERx100(display) == 1401)
		return 1350000;

	return 2000000;
}

/*
 * Max link rate from the VBT: the port-level limit, further capped by the
 * eDP panel's own VBT limit when one is specified.
 */
static int vbt_max_link_rate(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int max_rate;

	max_rate = intel_bios_dp_max_link_rate(encoder->devdata);

	if (intel_dp_is_edp(intel_dp)) {
		struct intel_connector *connector = intel_dp->attached_connector;
		int edp_max_rate = connector->panel.vbt.edp.max_link_rate;

		if (max_rate && edp_max_rate)
			max_rate = min(max_rate, edp_max_rate);
		else if (edp_max_rate)
			max_rate = edp_max_rate;
	}

	return max_rate;
}

/*
 * Select the platform's source rate table and truncate it at the platform /
 * VBT max rate. Called once at init (see the drm_WARN_ON below).
 */
static void
intel_dp_set_source_rates(struct intel_dp *intel_dp)
{
	/* The values must be in increasing order */
	static const int bmg_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000, 675000,
		810000, 1000000, 1350000,
	};
	static const int mtl_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000, 675000,
		810000, 1000000, 2000000,
	};
	static const int icl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000, 648000, 810000,
		1000000, 1350000,
	};
	static const int bxt_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000
	};
	static const int skl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000
	};
	static const int hsw_rates[] = {
		162000, 270000, 540000
	};
	static const int g4x_rates[] = {
		162000, 270000
	};
	struct intel_display *display = to_intel_display(intel_dp);
	const int *source_rates;
	int size, max_rate = 0, vbt_max_rate;

	/* This should only be done once */
	drm_WARN_ON(display->drm,
		    intel_dp->source_rates || intel_dp->num_source_rates);

	if (DISPLAY_VER(display) >= 14) {
		if (display->platform.battlemage) {
			source_rates = bmg_rates;
			size = ARRAY_SIZE(bmg_rates);
		} else {
			source_rates = mtl_rates;
			size = ARRAY_SIZE(mtl_rates);
		}
		max_rate = mtl_max_source_rate(intel_dp);
	} else if (DISPLAY_VER(display) >= 11) {
		source_rates = icl_rates;
		size = ARRAY_SIZE(icl_rates);
		if (display->platform.dg2)
			max_rate = dg2_max_source_rate(intel_dp);
		else if (display->platform.alderlake_p || display->platform.alderlake_s ||
			 display->platform.dg1 || display->platform.rocketlake)
			max_rate = 810000;
		else if (display->platform.jasperlake || display->platform.elkhartlake)
			max_rate = ehl_max_source_rate(intel_dp);
		else
			max_rate = icl_max_source_rate(intel_dp);
	} else if (display->platform.geminilake || display->platform.broxton) {
		source_rates = bxt_rates;
		size = ARRAY_SIZE(bxt_rates);
	} else if (DISPLAY_VER(display) == 9) {
		source_rates = skl_rates;
		size = ARRAY_SIZE(skl_rates);
	} else if ((display->platform.haswell && !display->platform.haswell_ulx) ||
		   display->platform.broadwell) {
		source_rates = hsw_rates;
		size = ARRAY_SIZE(hsw_rates);
	} else {
		source_rates = g4x_rates;
		size = ARRAY_SIZE(g4x_rates);
	}

	/* The VBT limit applies on top of the platform limit, if both are set */
	vbt_max_rate = vbt_max_link_rate(intel_dp);
	if (max_rate && vbt_max_rate)
		max_rate = min(max_rate, vbt_max_rate);
	else if (vbt_max_rate)
		max_rate = vbt_max_rate;

	if (max_rate)
		size = intel_dp_rate_limit_len(source_rates, size, max_rate);

	intel_dp->source_rates = source_rates;
	intel_dp->num_source_rates = size;
}

/*
 * Merge-intersect two ascending rate arrays into @common_rates, returning the
 * number of common entries (capped at DP_MAX_SUPPORTED_RATES).
 */
static int intersect_rates(const int *source_rates, int source_len,
			   const int *sink_rates, int sink_len,
			   int *common_rates)
{
	int i = 0, j = 0, k = 0;

	while (i < source_len && j < sink_len) {
		if (source_rates[i] == sink_rates[j]) {
			if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
				return k;
			common_rates[k] = source_rates[i];
			++k;
			++i;
			++j;
		} else if (source_rates[i] < sink_rates[j]) {
			++i;
		} else {
			++j;
		}
	}
	return k;
}

/* return index of rate in rates array, or -1 if not found */
int intel_dp_rate_index(const int *rates, int len, int rate)
{
	int i;

	for (i = 0; i < len; i++)
		if (rate == rates[i])
			return i;

	return -1;
}

/* Link rate (10kbit/s units) of a link config table entry. */
static int intel_dp_link_config_rate(struct intel_dp *intel_dp,
				     const struct intel_dp_link_config *lc)
{
	return intel_dp_common_rate(intel_dp, lc->link_rate_idx);
}

/* Lane count of a link config entry; stored as its log2 exponent. */
static int intel_dp_link_config_lane_count(const struct intel_dp_link_config *lc)
{
	return 1 << lc->lane_count_exp;
}

/* Total DPRX data BW of a link config entry, in kBps. */
static int intel_dp_link_config_bw(struct intel_dp *intel_dp,
				   const struct intel_dp_link_config *lc)
{
	return drm_dp_max_dprx_data_rate(intel_dp_link_config_rate(intel_dp, lc),
					 intel_dp_link_config_lane_count(lc));
}

/* sort_r() comparator: ascending BW, ties broken by ascending link rate. */
static int link_config_cmp_by_bw(const void *a, const void *b, const void *p)
{
	struct intel_dp *intel_dp = (struct intel_dp *)p;	/* remove const */
	const struct intel_dp_link_config *lc_a = a;
	const struct intel_dp_link_config *lc_b = b;
	int bw_a = intel_dp_link_config_bw(intel_dp, lc_a);
	int bw_b = intel_dp_link_config_bw(intel_dp, lc_b);

	if (bw_a != bw_b)
		return bw_a - bw_b;

	return intel_dp_link_config_rate(intel_dp, lc_a) -
	       intel_dp_link_config_rate(intel_dp, lc_b);
}

/*
 * Build the table of all (common rate x lane count) link configs, sorted by
 * increasing BW, for fallback iteration during link training.
 */
static void intel_dp_link_config_init(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_dp_link_config *lc;
	int num_common_lane_configs;
	int i;
	int j;

	if (drm_WARN_ON(display->drm, !is_power_of_2(intel_dp_max_common_lane_count(intel_dp))))
		return;

	/* Lane counts 1, 2, ..., max (powers of 2), i.e. log2(max) + 1 configs */
	num_common_lane_configs = ilog2(intel_dp_max_common_lane_count(intel_dp)) + 1;

	if (drm_WARN_ON(display->drm, intel_dp->num_common_rates * num_common_lane_configs >
				      ARRAY_SIZE(intel_dp->link.configs)))
		return;

	intel_dp->link.num_configs = intel_dp->num_common_rates * num_common_lane_configs;

	lc = &intel_dp->link.configs[0];
	for (i = 0; i < intel_dp->num_common_rates; i++) {
		for (j = 0; j < num_common_lane_configs; j++) {
			lc->lane_count_exp = j;
			lc->link_rate_idx = i;

			lc++;
		}
	}

	sort_r(intel_dp->link.configs, intel_dp->link.num_configs,
	       sizeof(intel_dp->link.configs[0]),
	       link_config_cmp_by_bw, NULL,
	       intel_dp);
}

/* Return the link rate / lane count of config @idx (clamped to 0 if bogus). */
void intel_dp_link_config_get(struct intel_dp *intel_dp, int idx, int *link_rate, int *lane_count)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct intel_dp_link_config *lc;

	if (drm_WARN_ON(display->drm, idx < 0 || idx >= intel_dp->link.num_configs))
		idx = 0;

	lc = &intel_dp->link.configs[idx];

	*link_rate = intel_dp_link_config_rate(intel_dp, lc);
	*lane_count = intel_dp_link_config_lane_count(lc);
}

/* Return the config table index matching @link_rate/@lane_count, or -1. */
int intel_dp_link_config_index(struct intel_dp *intel_dp, int link_rate, int lane_count)
{
	int link_rate_idx = intel_dp_rate_index(intel_dp->common_rates, intel_dp->num_common_rates,
						link_rate);
	int lane_count_exp = ilog2(lane_count);
	int i;

	for (i = 0; i < intel_dp->link.num_configs; i++) {
		const struct intel_dp_link_config *lc = &intel_dp->link.configs[i];

		if (lc->lane_count_exp == lane_count_exp &&
		    lc->link_rate_idx == link_rate_idx)
			return i;
	}

	return -1;
}

/* Intersect source and sink rates into common_rates[] and (re)build the link config table. */
static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	drm_WARN_ON(display->drm,
		    !intel_dp->num_source_rates || !intel_dp->num_sink_rates);

	intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
						     intel_dp->num_source_rates,
						     intel_dp->sink_rates,
						     intel_dp->num_sink_rates,
						     intel_dp->common_rates);

	/* Paranoia, there should always be something in common.
*/ 813 if (drm_WARN_ON(display->drm, intel_dp->num_common_rates == 0)) { 814 intel_dp->common_rates[0] = 162000; 815 intel_dp->num_common_rates = 1; 816 } 817 818 intel_dp_link_config_init(intel_dp); 819 } 820 821 bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate, 822 u8 lane_count) 823 { 824 /* 825 * FIXME: we need to synchronize the current link parameters with 826 * hardware readout. Currently fast link training doesn't work on 827 * boot-up. 828 */ 829 if (link_rate == 0 || 830 link_rate > intel_dp->link.max_rate) 831 return false; 832 833 if (lane_count == 0 || 834 lane_count > intel_dp_max_lane_count(intel_dp)) 835 return false; 836 837 return true; 838 } 839 840 u32 intel_dp_mode_to_fec_clock(u32 mode_clock) 841 { 842 return div_u64(mul_u32_u32(mode_clock, DP_DSC_FEC_OVERHEAD_FACTOR), 843 1000000U); 844 } 845 846 int intel_dp_bw_fec_overhead(bool fec_enabled) 847 { 848 /* 849 * TODO: Calculate the actual overhead for a given mode. 850 * The hard-coded 1/0.972261=2.853% overhead factor 851 * corresponds (for instance) to the 8b/10b DP FEC 2.4% + 852 * 0.453% DSC overhead. This is enough for a 3840 width mode, 853 * which has a DSC overhead of up to ~0.2%, but may not be 854 * enough for a 1024 width mode where this is ~0.8% (on a 4 855 * lane DP link, with 2 DSC slices and 8 bpp color depth). 856 */ 857 return fec_enabled ? 
DP_DSC_FEC_OVERHEAD_FACTOR : 1000000; 858 } 859 860 static int 861 small_joiner_ram_size_bits(struct intel_display *display) 862 { 863 if (DISPLAY_VER(display) >= 13) 864 return 17280 * 8; 865 else if (DISPLAY_VER(display) >= 11) 866 return 7680 * 8; 867 else 868 return 6144 * 8; 869 } 870 871 static int align_min_vesa_compressed_bpp_x16(int min_link_bpp_x16) 872 { 873 int i; 874 875 for (i = 0; i < ARRAY_SIZE(valid_dsc_bpp); i++) { 876 int vesa_bpp_x16 = fxp_q4_from_int(valid_dsc_bpp[i]); 877 878 if (vesa_bpp_x16 >= min_link_bpp_x16) 879 return vesa_bpp_x16; 880 } 881 882 return 0; 883 } 884 885 static int align_max_vesa_compressed_bpp_x16(int max_link_bpp_x16) 886 { 887 int i; 888 889 for (i = ARRAY_SIZE(valid_dsc_bpp) - 1; i >= 0; i--) { 890 int vesa_bpp_x16 = fxp_q4_from_int(valid_dsc_bpp[i]); 891 892 if (vesa_bpp_x16 <= max_link_bpp_x16) 893 return vesa_bpp_x16; 894 } 895 896 return 0; 897 } 898 899 static int bigjoiner_interface_bits(struct intel_display *display) 900 { 901 return DISPLAY_VER(display) >= 14 ? 36 : 24; 902 } 903 904 static u32 bigjoiner_bw_max_bpp(struct intel_display *display, u32 mode_clock, 905 int num_joined_pipes) 906 { 907 u32 max_bpp; 908 /* With bigjoiner multiple dsc engines are used in parallel so PPC is 2 */ 909 int ppc = 2; 910 int num_big_joiners = num_joined_pipes / 2; 911 912 max_bpp = display->cdclk.max_cdclk_freq * ppc * bigjoiner_interface_bits(display) / 913 intel_dp_mode_to_fec_clock(mode_clock); 914 915 max_bpp *= num_big_joiners; 916 917 return max_bpp; 918 919 } 920 921 static u32 small_joiner_ram_max_bpp(struct intel_display *display, 922 u32 mode_hdisplay, 923 int num_joined_pipes) 924 { 925 u32 max_bpp; 926 927 /* Small Joiner Check: output bpp <= joiner RAM (bits) / Horiz. 
width */ 928 max_bpp = small_joiner_ram_size_bits(display) / mode_hdisplay; 929 930 max_bpp *= num_joined_pipes; 931 932 return max_bpp; 933 } 934 935 static int ultrajoiner_ram_bits(void) 936 { 937 return 4 * 72 * 512; 938 } 939 940 static u32 ultrajoiner_ram_max_bpp(u32 mode_hdisplay) 941 { 942 return ultrajoiner_ram_bits() / mode_hdisplay; 943 } 944 945 /* TODO: return a bpp_x16 value */ 946 static 947 u32 get_max_compressed_bpp_with_joiner(struct intel_display *display, 948 u32 mode_clock, u32 mode_hdisplay, 949 int num_joined_pipes) 950 { 951 u32 max_bpp = small_joiner_ram_max_bpp(display, mode_hdisplay, num_joined_pipes); 952 953 if (num_joined_pipes > 1) 954 max_bpp = min(max_bpp, bigjoiner_bw_max_bpp(display, mode_clock, 955 num_joined_pipes)); 956 if (num_joined_pipes == 4) 957 max_bpp = min(max_bpp, ultrajoiner_ram_max_bpp(mode_hdisplay)); 958 959 return max_bpp; 960 } 961 962 u8 intel_dp_dsc_get_slice_count(const struct intel_connector *connector, 963 int mode_clock, int mode_hdisplay, 964 int num_joined_pipes) 965 { 966 struct intel_display *display = to_intel_display(connector); 967 u32 sink_slice_count_mask = 968 drm_dp_dsc_sink_slice_count_mask(connector->dp.dsc_dpcd, false); 969 u8 min_slice_count, i; 970 int max_slice_width; 971 int tp_rgb_yuv444; 972 int tp_yuv422_420; 973 974 /* 975 * TODO: Use the throughput value specific to the actual RGB/YUV 976 * format of the output. 977 * The RGB/YUV444 throughput value should be always either equal 978 * or smaller than the YUV422/420 value, but let's not depend on 979 * this assumption. 
980 */ 981 if (mode_clock > max(connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444, 982 connector->dp.dsc_branch_caps.overall_throughput.yuv422_420)) 983 return 0; 984 985 if (mode_hdisplay > connector->dp.dsc_branch_caps.max_line_width) 986 return 0; 987 988 /* 989 * TODO: Pass the total pixel rate of all the streams transferred to 990 * an MST tiled display, calculate the total slice count for all tiles 991 * from this and the per-tile slice count from the total slice count. 992 */ 993 tp_rgb_yuv444 = drm_dp_dsc_sink_max_slice_throughput(connector->dp.dsc_dpcd, 994 mode_clock, true); 995 tp_yuv422_420 = drm_dp_dsc_sink_max_slice_throughput(connector->dp.dsc_dpcd, 996 mode_clock, false); 997 998 /* 999 * TODO: Use the throughput value specific to the actual RGB/YUV 1000 * format of the output. 1001 * For now use the smaller of these, which is ok, potentially 1002 * resulting in a higher than required minimum slice count. 1003 * The RGB/YUV444 throughput value should be always either equal 1004 * or smaller than the YUV422/420 value, but let's not depend on 1005 * this assumption. 
1006 */ 1007 min_slice_count = DIV_ROUND_UP(mode_clock, min(tp_rgb_yuv444, tp_yuv422_420)); 1008 1009 /* 1010 * Due to some DSC engine BW limitations, we need to enable second 1011 * slice and VDSC engine, whenever we approach close enough to max CDCLK 1012 */ 1013 if (mode_clock >= ((display->cdclk.max_cdclk_freq * 85) / 100)) 1014 min_slice_count = max_t(u8, min_slice_count, 2); 1015 1016 max_slice_width = drm_dp_dsc_sink_max_slice_width(connector->dp.dsc_dpcd); 1017 if (max_slice_width < DP_DSC_MIN_SLICE_WIDTH_VALUE) { 1018 drm_dbg_kms(display->drm, 1019 "Unsupported slice width %d by DP DSC Sink device\n", 1020 max_slice_width); 1021 return 0; 1022 } 1023 /* Also take into account max slice width */ 1024 min_slice_count = max_t(u8, min_slice_count, 1025 DIV_ROUND_UP(mode_hdisplay, 1026 max_slice_width)); 1027 1028 /* Find the closest match to the valid slice count values */ 1029 for (i = 0; i < ARRAY_SIZE(valid_dsc_slicecount); i++) { 1030 u8 test_slice_count = valid_dsc_slicecount[i] * num_joined_pipes; 1031 1032 /* 1033 * 3 DSC Slices per pipe need 3 DSC engines, which is supported only 1034 * with Ultrajoiner only for some platforms. 1035 */ 1036 if (valid_dsc_slicecount[i] == 3 && 1037 (!HAS_DSC_3ENGINES(display) || num_joined_pipes != 4)) 1038 continue; 1039 1040 if (!(drm_dp_dsc_slice_count_to_mask(test_slice_count) & 1041 sink_slice_count_mask)) 1042 continue; 1043 1044 /* 1045 * Bigjoiner needs small joiner to be enabled. 1046 * So there should be at least 2 dsc slices per pipe, 1047 * whenever bigjoiner is enabled. 1048 */ 1049 if (num_joined_pipes > 1 && valid_dsc_slicecount[i] < 2) 1050 continue; 1051 1052 if (mode_hdisplay % test_slice_count) 1053 continue; 1054 1055 if (min_slice_count <= test_slice_count) 1056 return test_slice_count; 1057 } 1058 1059 /* Print slice count 1,2,4,..24 if bit#0,1,3,..23 is set in the mask. 
 */
	sink_slice_count_mask <<= 1;
	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] Unsupported slice count (min: %d, sink supported: %*pbl)\n",
		    connector->base.base.id, connector->base.name,
		    min_slice_count,
		    (int)BITS_PER_TYPE(sink_slice_count_mask), &sink_slice_count_mask);

	return 0;
}

/* Can the source hardware output pixels in the given format at all? */
static bool source_can_output(struct intel_dp *intel_dp,
			      enum intel_output_format format)
{
	struct intel_display *display = to_intel_display(intel_dp);

	switch (format) {
	case INTEL_OUTPUT_FORMAT_RGB:
		return true;

	case INTEL_OUTPUT_FORMAT_YCBCR444:
		/*
		 * No YCbCr output support on gmch platforms.
		 * Also, ILK doesn't seem capable of DP YCbCr output.
		 * The displayed image is severely corrupted. SNB+ is fine.
		 */
		return !HAS_GMCH(display) && !display->platform.ironlake;

	case INTEL_OUTPUT_FORMAT_YCBCR420:
		/* Platform < Gen 11 cannot output YCbCr420 format */
		return DISPLAY_VER(display) >= 11;

	default:
		MISSING_CASE(format);
		return false;
	}
}

/*
 * Can the branch device (DFP) convert an RGB stream from the source into
 * @sink_format?
 */
static bool
dfp_can_convert_from_rgb(struct intel_dp *intel_dp,
			 enum intel_output_format sink_format)
{
	if (!drm_dp_is_branch(intel_dp->dpcd))
		return false;

	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		return intel_dp->dfp.rgb_to_ycbcr;

	/* 4:2:0 needs both the RGB->YCbCr and the 4:4:4->4:2:0 conversion steps */
	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		return intel_dp->dfp.rgb_to_ycbcr &&
			intel_dp->dfp.ycbcr_444_to_420;

	return false;
}

/* Can the branch device convert a YCbCr 4:4:4 stream into @sink_format? */
static bool
dfp_can_convert_from_ycbcr444(struct intel_dp *intel_dp,
			      enum intel_output_format sink_format)
{
	if (!drm_dp_is_branch(intel_dp->dpcd))
		return false;

	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		return intel_dp->dfp.ycbcr_444_to_420;

	return false;
}

/* Can the branch device convert from @output_format to @sink_format? */
static bool
dfp_can_convert(struct intel_dp *intel_dp,
		enum intel_output_format output_format,
		enum intel_output_format sink_format)
{
	switch (output_format) {
	case INTEL_OUTPUT_FORMAT_RGB:
		return dfp_can_convert_from_rgb(intel_dp, sink_format);
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		return dfp_can_convert_from_ycbcr444(intel_dp, sink_format);
	default:
		MISSING_CASE(output_format);
		return false;
	}

	return false;
}

/*
 * Pick the format the source should output, given the format the sink wants
 * and any debugfs-forced DSC output format.
 */
static enum intel_output_format
intel_dp_output_format(struct intel_connector *connector,
		       enum intel_output_format sink_format)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	enum intel_output_format force_dsc_output_format =
		intel_dp->force_dsc_output_format;
	enum intel_output_format output_format;
	/*
	 * Honour the forced format if the source can output it; for branch
	 * devices the format is additionally accepted when it differs from
	 * the sink's format or the DFP can convert it.
	 */
	if (force_dsc_output_format) {
		if (source_can_output(intel_dp, force_dsc_output_format) &&
		    (!drm_dp_is_branch(intel_dp->dpcd) ||
		     sink_format != force_dsc_output_format ||
		     dfp_can_convert(intel_dp, force_dsc_output_format, sink_format)))
			return force_dsc_output_format;

		drm_dbg_kms(display->drm, "Cannot force DSC output format\n");
	}

	if (sink_format == INTEL_OUTPUT_FORMAT_RGB ||
	    dfp_can_convert_from_rgb(intel_dp, sink_format))
		output_format = INTEL_OUTPUT_FORMAT_RGB;

	else if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR444 ||
		 dfp_can_convert_from_ycbcr444(intel_dp, sink_format))
		output_format = INTEL_OUTPUT_FORMAT_YCBCR444;

	else
		output_format = INTEL_OUTPUT_FORMAT_YCBCR420;

	drm_WARN_ON(display->drm, !source_can_output(intel_dp, output_format));

	return output_format;
}

/* Minimum pipe bpp for the given output format */
int intel_dp_min_bpp(enum intel_output_format output_format)
{
	if (output_format == INTEL_OUTPUT_FORMAT_RGB)
		return intel_display_min_pipe_bpp();
	else
		return 8 * 3;
}

int intel_dp_output_format_link_bpp_x16(enum
intel_output_format output_format, int pipe_bpp)
{
	/*
	 * bpp value was assumed to RGB format. And YCbCr 4:2:0 output
	 * format of the number of bytes per pixel will be half the number
	 * of bytes of RGB pixel.
	 */
	if (output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		pipe_bpp /= 2;

	return fxp_q4_from_int(pipe_bpp);
}

/*
 * Format the sink wants for @mode: 4:2:0 when the mode is 4:2:0-only per the
 * display info, RGB otherwise.
 */
static enum intel_output_format
intel_dp_sink_format(struct intel_connector *connector,
		     const struct drm_display_mode *mode)
{
	const struct drm_display_info *info = &connector->base.display_info;

	if (drm_mode_is_420_only(info, mode))
		return INTEL_OUTPUT_FORMAT_YCBCR420;

	return INTEL_OUTPUT_FORMAT_RGB;
}

/* Minimum link bpp (.4 fixed point) needed to transfer @mode */
static int
intel_dp_mode_min_link_bpp_x16(struct intel_connector *connector,
			       const struct drm_display_mode *mode)
{
	enum intel_output_format output_format, sink_format;

	sink_format = intel_dp_sink_format(connector, mode);

	output_format = intel_dp_output_format(connector, sink_format);

	return intel_dp_output_format_link_bpp_x16(output_format,
						   intel_dp_min_bpp(output_format));
}

static bool intel_dp_hdisplay_bad(struct intel_display *display,
				  int hdisplay)
{
	/*
	 * Older platforms don't like hdisplay==4096 with DP.
	 *
	 * On ILK/SNB/IVB the pipe seems to be somewhat running (scanline
	 * and frame counter increment), but we don't get vblank interrupts,
	 * and the pipe underruns immediately. The link also doesn't seem
	 * to get trained properly.
	 *
	 * On CHV the vblank interrupts don't seem to disappear but
	 * otherwise the symptoms are similar.
	 *
	 * TODO: confirm the behaviour on HSW+
	 */
	return hdisplay == 4096 && !HAS_DDI(display);
}

/* Max TMDS clock: the DFP limit, further clamped by the sink's limit if known */
static int intel_dp_max_tmds_clock(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_display_info *info = &connector->base.display_info;
	int max_tmds_clock = intel_dp->dfp.max_tmds_clock;

	/* Only consider the sink's max TMDS clock if we know this is a HDMI DFP */
	if (max_tmds_clock && info->max_tmds_clock)
		max_tmds_clock = min(max_tmds_clock, info->max_tmds_clock);

	return max_tmds_clock;
}

/*
 * Check the given dotclock/bpc/format against the DP++/HDMI/DVI DFP's TMDS
 * clock limits. The limits are skipped entirely unless
 * @respect_downstream_limits.
 */
static enum drm_mode_status
intel_dp_tmds_clock_valid(struct intel_dp *intel_dp,
			  int clock, int bpc,
			  enum intel_output_format sink_format,
			  bool respect_downstream_limits)
{
	int tmds_clock, min_tmds_clock, max_tmds_clock;

	if (!respect_downstream_limits)
		return MODE_OK;

	tmds_clock = intel_hdmi_tmds_clock(clock, bpc, sink_format);

	min_tmds_clock = intel_dp->dfp.min_tmds_clock;
	max_tmds_clock = intel_dp_max_tmds_clock(intel_dp);

	if (min_tmds_clock && tmds_clock < min_tmds_clock)
		return MODE_CLOCK_LOW;

	if (max_tmds_clock && tmds_clock > max_tmds_clock)
		return MODE_CLOCK_HIGH;

	return MODE_OK;
}

/* Validate @mode against the downstream facing port (branch device) limits */
static enum drm_mode_status
intel_dp_mode_valid_downstream(struct intel_connector *connector,
			       const struct drm_display_mode *mode,
			       int target_clock)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	enum drm_mode_status status;
	enum intel_output_format sink_format;

	/* If PCON supports FRL MODE, check FRL bandwidth constraints */
	if (intel_dp->dfp.pcon_max_frl_bw) {
		int link_bpp_x16 = intel_dp_mode_min_link_bpp_x16(connector, mode);
		int target_bw;
		int max_frl_bw;

		target_bw = fxp_q4_to_int_roundup(link_bpp_x16) * target_clock;

		max_frl_bw = intel_dp->dfp.pcon_max_frl_bw;

		/* converting bw from Gbps to Kbps*/
		max_frl_bw = max_frl_bw * 1000000;

		if (target_bw > max_frl_bw)
			return MODE_CLOCK_HIGH;

		return MODE_OK;
	}

	if (intel_dp->dfp.max_dotclock &&
	    target_clock > intel_dp->dfp.max_dotclock)
		return MODE_CLOCK_HIGH;

	sink_format = intel_dp_sink_format(connector, mode);

	/* Assume 8bpc for the DP++/HDMI/DVI TMDS clock check */
	status = intel_dp_tmds_clock_valid(intel_dp, target_clock,
					   8, sink_format, true);

	if (status != MODE_OK) {
		/* Retry with 4:2:0 if the mode and connector also allow it */
		if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
		    !connector->base.ycbcr_420_allowed ||
		    !drm_mode_is_420_also(info, mode))
			return status;
		sink_format = INTEL_OUTPUT_FORMAT_YCBCR420;
		status = intel_dp_tmds_clock_valid(intel_dp, target_clock,
						   8, sink_format, true);
		if (status != MODE_OK)
			return status;
	}

	return MODE_OK;
}

/*
 * Would @num_joined_pipes (2 or 4) be required to drive @hdisplay/@clock?
 */
static
bool intel_dp_needs_joiner(struct intel_dp *intel_dp,
			   struct intel_connector *connector,
			   int hdisplay, int clock,
			   int num_joined_pipes)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int hdisplay_limit;

	if (!intel_dp_has_joiner(intel_dp))
		return false;

	/* N pipes are needed when N/2 pipes can't drive the mode (checked below) */
	num_joined_pipes /= 2;

	hdisplay_limit = DISPLAY_VER(display) >= 30 ?
		6144 : 5120;

	return clock > num_joined_pipes * display->cdclk.max_dotclk_freq ||
		hdisplay > num_joined_pipes * hdisplay_limit;
}

/*
 * Number of pipes to join for this mode: debugfs force wins, then
 * ultrajoiner (4), then big/uncompressed joiner (2), else a single pipe.
 */
int intel_dp_num_joined_pipes(struct intel_dp *intel_dp,
			      struct intel_connector *connector,
			      int hdisplay, int clock)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (connector->force_joined_pipes)
		return connector->force_joined_pipes;

	if (HAS_ULTRAJOINER(display) &&
	    intel_dp_needs_joiner(intel_dp, connector, hdisplay, clock, 4))
		return 4;

	if ((HAS_BIGJOINER(display) || HAS_UNCOMPRESSED_JOINER(display)) &&
	    intel_dp_needs_joiner(intel_dp, connector, hdisplay, clock, 2))
		return 2;

	return 1;
}

/* Is DSC usable on this connector (platform, MST, VBT and sink support)? */
bool intel_dp_has_dsc(const struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);

	if (!HAS_DSC(display))
		return false;

	if (connector->mst.dp && !HAS_DSC_MST(display))
		return false;

	/* The VBT may disable DSC on eDP panels */
	if (connector->base.connector_type == DRM_MODE_CONNECTOR_eDP &&
	    connector->panel.vbt.edp.dsc_disable)
		return false;

	if (!drm_dp_sink_supports_dsc(connector->dp.dsc_dpcd))
		return false;

	return true;
}

static enum drm_mode_status
intel_dp_mode_valid(struct drm_connector *_connector,
		    const struct drm_display_mode *mode)
{
	struct intel_display *display = to_intel_display(_connector->dev);
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	enum intel_output_format sink_format, output_format;
	const struct drm_display_mode *fixed_mode;
	int target_clock = mode->clock;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int max_dotclk = display->cdclk.max_dotclk_freq;
	u16 dsc_max_compressed_bpp = 0;
	u8 dsc_slice_count = 0;
	enum drm_mode_status status;
	bool dsc = false;
	int num_joined_pipes;
	int link_bpp_x16;

	status = intel_cpu_transcoder_mode_valid(display, mode);
	if (status != MODE_OK)
		return status;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK)
		return MODE_H_ILLEGAL;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	/* eDP: validate against the panel's fixed mode and use its clock */
	fixed_mode = intel_panel_fixed_mode(connector, mode);
	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
		status = intel_panel_mode_valid(connector, mode);
		if (status != MODE_OK)
			return status;

		target_clock = fixed_mode->clock;
	}

	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     mode->hdisplay, target_clock);
	max_dotclk *= num_joined_pipes;

	sink_format = intel_dp_sink_format(connector, mode);
	output_format = intel_dp_output_format(connector, sink_format);

	status = intel_pfit_mode_valid(display, mode, output_format, num_joined_pipes);
	if (status != MODE_OK)
		return status;

	if (target_clock > max_dotclk)
		return MODE_CLOCK_HIGH;

	if (intel_dp_hdisplay_bad(display, mode->hdisplay))
		return MODE_H_ILLEGAL;

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_link_data_rate(intel_dp, max_link_clock, max_lanes);

	link_bpp_x16 = intel_dp_mode_min_link_bpp_x16(connector, mode);
	mode_rate = intel_dp_link_required(max_link_clock, max_lanes,
					   target_clock, mode->hdisplay,
					   link_bpp_x16, 0);

	/* Check whether DSC could make an otherwise too-big mode fit */
	if (intel_dp_has_dsc(connector)) {
		int pipe_bpp;

		/*
		 * TBD pass the connector BPC,
		 * for now U8_MAX so that max BPC on that platform would be picked
		 */
		pipe_bpp = intel_dp_dsc_compute_max_bpp(connector, U8_MAX);

		/*
		 * Output bpp is stored in 6.4 format so right shift by 4 to get the
		 * integer value since we support only integer values of bpp.
		 */
		if (intel_dp_is_edp(intel_dp)) {
			dsc_max_compressed_bpp =
				drm_edp_dsc_sink_output_bpp(connector->dp.dsc_dpcd) >> 4;
			dsc_slice_count =
				drm_dp_dsc_sink_max_slice_count(connector->dp.dsc_dpcd,
								true);
			dsc = dsc_max_compressed_bpp && dsc_slice_count;
		} else if (drm_dp_sink_supports_fec(connector->dp.fec_capability)) {
			unsigned long bw_overhead_flags = 0;

			/* FEC BW overhead is accounted for on non-UHBR rates only */
			if (!drm_dp_is_uhbr_rate(max_link_clock))
				bw_overhead_flags |= DRM_DP_BW_OVERHEAD_FEC;

			dsc = intel_dp_mode_valid_with_dsc(connector,
							   max_link_clock, max_lanes,
							   target_clock, mode->hdisplay,
							   num_joined_pipes,
							   output_format, pipe_bpp,
							   bw_overhead_flags);
		}
	}

	if (intel_dp_joiner_needs_dsc(display, num_joined_pipes) && !dsc)
		return MODE_CLOCK_HIGH;

	if (mode_rate > max_rate && !dsc)
		return MODE_CLOCK_HIGH;

	status = intel_dp_mode_valid_downstream(connector, mode, target_clock);
	if (status != MODE_OK)
		return status;

	return intel_mode_valid_max_plane_size(display, mode, num_joined_pipes);
}

bool intel_dp_source_supports_tps3(struct intel_display *display)
{
	return DISPLAY_VER(display) >= 9 ||
		display->platform.broadwell || display->platform.haswell;
}

bool intel_dp_source_supports_tps4(struct intel_display *display)
{
	return DISPLAY_VER(display) >= 10;
}

/* Append @array's elements to @s as a comma separated list */
static void seq_buf_print_array(struct seq_buf *s, const int *array, int nelem)
{
	int i;

	for (i = 0; i < nelem; i++)
		seq_buf_printf(s, "%s%d", i ? ", " : "", array[i]);
}

/* Dump the source/sink/common link rate tables to the KMS debug log */
static void intel_dp_print_rates(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	DECLARE_SEQ_BUF(s, 128); /* FIXME: too big for stack?
 */

	if (!drm_debug_enabled(DRM_UT_KMS))
		return;

	seq_buf_print_array(&s, intel_dp->source_rates, intel_dp->num_source_rates);
	drm_dbg_kms(display->drm, "source rates: %s\n", seq_buf_str(&s));

	seq_buf_clear(&s);
	seq_buf_print_array(&s, intel_dp->sink_rates, intel_dp->num_sink_rates);
	drm_dbg_kms(display->drm, "sink rates: %s\n", seq_buf_str(&s));

	seq_buf_clear(&s);
	seq_buf_print_array(&s, intel_dp->common_rates, intel_dp->num_common_rates);
	drm_dbg_kms(display->drm, "common rates: %s\n", seq_buf_str(&s));
}

/*
 * Resolve the debugfs-forced link rate to an actual common rate: the highest
 * common rate within the forced limit, or the lowest common rate if none
 * qualify.
 */
static int forced_link_rate(struct intel_dp *intel_dp)
{
	int len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->link.force_rate);

	if (len == 0)
		return intel_dp_common_rate(intel_dp, 0);

	return intel_dp_common_rate(intel_dp, len - 1);
}

int
intel_dp_max_link_rate(struct intel_dp *intel_dp)
{
	int len;

	if (intel_dp->link.force_rate)
		return forced_link_rate(intel_dp);

	len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->link.max_rate);

	return intel_dp_common_rate(intel_dp, len - 1);
}

static int
intel_dp_min_link_rate(struct intel_dp *intel_dp)
{
	if (intel_dp->link.force_rate)
		return forced_link_rate(intel_dp);

	return intel_dp_common_rate(intel_dp, 0);
}

/* Index of @rate in the sink rate table (used for eDP 1.4 rate select) */
int intel_dp_rate_select(struct intel_dp *intel_dp, int rate)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int i = intel_dp_rate_index(intel_dp->sink_rates,
				    intel_dp->num_sink_rates, rate);

	if (drm_WARN_ON(display->drm, i < 0))
		i = 0;

	return i;
}

/*
 * Compute the link bandwidth code / rate select value to program for
 * @port_clock; only one of the two is used depending on the rate select
 * method.
 */
void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock,
			   u8 *link_bw, u8 *rate_select)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/* FIXME g4x can't generate an exact 2.7GHz with the 96MHz non-SSC refclk */
	if (display->platform.g4x && port_clock == 268800)
		port_clock = 270000;

	/* eDP 1.4 rate select method. */
	if (intel_dp->use_rate_select) {
		*link_bw = 0;
		*rate_select =
			intel_dp_rate_select(intel_dp, port_clock);
	} else {
		*link_bw = drm_dp_link_rate_to_bw_code(port_clock);
		*rate_select = 0;
	}
}

bool intel_dp_has_hdmi_sink(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	return connector->base.display_info.is_hdmi;
}

/*
 * FEC support on the source side: always on display ver >= 12; on ver 11
 * except for port A and MST.
 */
static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp,
					 const struct intel_crtc_state *pipe_config)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	if (DISPLAY_VER(display) >= 12)
		return true;

	if (DISPLAY_VER(display) == 11 && encoder->port != PORT_A &&
	    !intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DP_MST))
		return true;

	return false;
}

bool intel_dp_supports_fec(struct intel_dp *intel_dp,
			   const struct intel_connector *connector,
			   const struct intel_crtc_state *pipe_config)
{
	return intel_dp_source_supports_fec(intel_dp, pipe_config) &&
		drm_dp_sink_supports_fec(connector->dp.fec_capability);
}

bool intel_dp_supports_dsc(struct intel_dp *intel_dp,
			   const struct intel_connector *connector,
			   const struct intel_crtc_state *crtc_state)
{
	if (!intel_dp_has_dsc(connector))
		return false;

	/* DSC on DP SST additionally requires FEC support */
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP) &&
	    !intel_dp_supports_fec(intel_dp, connector, crtc_state))
		return false;

	return intel_dsc_source_support(crtc_state);
}

/*
 * Highest bpc in [8..@bpc] the HDMI sink and DFP TMDS clock limits allow for
 * this mode, or -EINVAL if none.
 */
static int intel_dp_hdmi_compute_bpc(struct intel_dp *intel_dp,
				     const struct intel_crtc_state *crtc_state,
				     int bpc, bool respect_downstream_limits)
{
	int clock = crtc_state->hw.adjusted_mode.crtc_clock;

	/*
	 * Current bpc could already be below 8bpc due to
	 * FDI bandwidth constraints or other limits.
	 * HDMI minimum is 8bpc however.
	 */
	bpc = max(bpc, 8);

	/*
	 * We will never exceed downstream TMDS clock limits while
	 * attempting deep color. If the user insists on forcing an
	 * out of spec mode they will have to be satisfied with 8bpc.
	 */
	if (!respect_downstream_limits)
		bpc = 8;

	for (; bpc >= 8; bpc -= 2) {
		if (intel_hdmi_bpc_possible(crtc_state, bpc,
					    intel_dp_has_hdmi_sink(intel_dp)) &&
		    intel_dp_tmds_clock_valid(intel_dp, clock, bpc, crtc_state->sink_format,
					      respect_downstream_limits) == MODE_OK)
			return bpc;
	}

	return -EINVAL;
}

/*
 * Max pipe bpp after applying DFP/HDMI downstream limits and the VBT eDP bpp,
 * or 0 on failure.
 */
static int intel_dp_max_bpp(struct intel_dp *intel_dp,
			    const struct intel_crtc_state *crtc_state,
			    bool respect_downstream_limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	int bpp, bpc;

	bpc = crtc_state->pipe_bpp / 3;

	if (intel_dp->dfp.max_bpc)
		bpc = min_t(int, bpc, intel_dp->dfp.max_bpc);

	/* NOTE(review): non-zero min_tmds_clock is taken to mean a TMDS DFP — confirm */
	if (intel_dp->dfp.min_tmds_clock) {
		int max_hdmi_bpc;

		max_hdmi_bpc = intel_dp_hdmi_compute_bpc(intel_dp, crtc_state, bpc,
							 respect_downstream_limits);
		if (max_hdmi_bpc < 0)
			return 0;

		bpc = min(bpc, max_hdmi_bpc);
	}

	bpp = bpc * 3;
	if (intel_dp_is_edp(intel_dp)) {
		/* Get bpp from vbt only for panels that dont have bpp in edid */
		if (connector->base.display_info.bpc == 0 &&
		    connector->panel.vbt.edp.bpp &&
		    connector->panel.vbt.edp.bpp < bpp) {
			drm_dbg_kms(display->drm,
				    "clamping bpp for eDP panel to BIOS-provided %i\n",
				    connector->panel.vbt.edp.bpp);
			bpp = connector->panel.vbt.edp.bpp;
		}
	}

	return bpp;
}

static bool has_seamless_m_n(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);

	/*
	 * Seamless M/N reprogramming only implemented
	 * for BDW+ double buffered M/N registers so far.
	 */
	return HAS_DOUBLE_BUFFERED_M_N(display) &&
		intel_panel_drrs_type(connector) == DRRS_TYPE_SEAMLESS;
}

/*
 * Dotclock to size the link for: the panel's highest mode when seamless DRRS
 * is possible, otherwise the adjusted mode's crtc_clock.
 */
static int intel_dp_mode_clock(const struct intel_crtc_state *crtc_state,
			       const struct drm_connector_state *conn_state)
{
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* FIXME a bit of a mess wrt clock vs. crtc_clock */
	if (has_seamless_m_n(connector))
		return intel_panel_highest_mode(connector, adjusted_mode)->clock;
	else
		return adjusted_mode->crtc_clock;
}

/* Optimize link config in order: max bpp, min clock, min lanes */
static int
intel_dp_compute_link_config_wide(struct intel_dp *intel_dp,
				  struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state,
				  const struct link_config_limits *limits)
{
	int bpp, i, lane_count, clock = intel_dp_mode_clock(pipe_config, conn_state);
	int link_rate, link_avail;

	for (bpp = fxp_q4_to_int(limits->link.max_bpp_x16);
	     bpp >= fxp_q4_to_int(limits->link.min_bpp_x16);
	     bpp -= 2 * 3) {
		int link_bpp_x16 =
			intel_dp_output_format_link_bpp_x16(pipe_config->output_format, bpp);

		for (i = 0; i < intel_dp->num_common_rates; i++) {
			link_rate = intel_dp_common_rate(intel_dp, i);
			if (link_rate < limits->min_rate ||
			    link_rate > limits->max_rate)
				continue;

			for (lane_count = limits->min_lane_count;
			     lane_count <= limits->max_lane_count;
			     lane_count <<= 1) {
				const struct drm_display_mode *adjusted_mode =
					&pipe_config->hw.adjusted_mode;
				int mode_rate =
					intel_dp_link_required(link_rate, lane_count,
							       clock, adjusted_mode->hdisplay,
							       link_bpp_x16, 0);

				link_avail = intel_dp_max_link_data_rate(intel_dp,
									 link_rate,
									 lane_count);

				/* First fit wins: highest bpp, lowest rate/lane count */
				if (mode_rate <= link_avail) {
					pipe_config->lane_count = lane_count;
					pipe_config->pipe_bpp = bpp;
					pipe_config->port_clock = link_rate;

					return 0;
				}
			}
		}
	}

	return -EINVAL;
}

int intel_dp_dsc_max_src_input_bpc(struct intel_display *display)
{
	/* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */
	if (DISPLAY_VER(display) >= 12)
		return 12;
	if (DISPLAY_VER(display) == 11)
		return 10;

	return intel_dp_dsc_min_src_input_bpc();
}

/*
 * Pick a sink-supported DSC input bpp >= @min_pipe_bpp, or 0 if none.
 * NOTE(review): iterates dsc_bpc[] from the end — assumes the helper returns
 * the list sorted; confirm the ordering guarantee.
 */
static int align_min_sink_dsc_input_bpp(const struct intel_connector *connector,
					int min_pipe_bpp)
{
	u8 dsc_bpc[3];
	int num_bpc;
	int i;

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,
						       dsc_bpc);
	for (i = num_bpc - 1; i >= 0; i--) {
		if (dsc_bpc[i] * 3 >= min_pipe_bpp)
			return dsc_bpc[i] * 3;
	}

	return 0;
}

/* Pick a sink-supported DSC input bpp <= @max_pipe_bpp, or 0 if none */
static int align_max_sink_dsc_input_bpp(const struct intel_connector *connector,
					int max_pipe_bpp)
{
	u8 dsc_bpc[3];
	int num_bpc;
	int i;

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,
						       dsc_bpc);
	for (i = 0; i < num_bpc; i++) {
		if (dsc_bpc[i] * 3 <= max_pipe_bpp)
			return dsc_bpc[i] * 3;
	}

	return 0;
}

/*
 * Max DSC input pipe bpp: source limit, clamped to @max_req_bpc and aligned
 * to what the sink supports. Returns 0 if no usable value.
 */
int intel_dp_dsc_compute_max_bpp(const struct intel_connector *connector,
				 u8 max_req_bpc)
{
	struct intel_display *display = to_intel_display(connector);
	int dsc_max_bpc;

	dsc_max_bpc = intel_dp_dsc_max_src_input_bpc(display);

	if (!dsc_max_bpc)
		return dsc_max_bpc;

	dsc_max_bpc = min(dsc_max_bpc, max_req_bpc);

	return align_max_sink_dsc_input_bpp(connector, dsc_max_bpc * 3);
}

static int intel_dp_source_dsc_version_minor(struct intel_display *display)
{
	return DISPLAY_VER(display) >= 14 ? 2 : 1;
}

/* DSC minor version from the sink's DPCD DSC capability block */
static int intel_dp_sink_dsc_version_minor(const u8 dsc_dpcd[DP_DSC_RECEIVER_CAP_SIZE])
{
	return (dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] & DP_DSC_MINOR_MASK) >>
		DP_DSC_MINOR_SHIFT;
}

static int intel_dp_get_slice_height(int vactive)
{
	int slice_height;

	/*
	 * VDSC 1.2a spec in Section 3.8 Options for Slices implies that 108
	 * lines is an optimal slice height, but any size can be used as long as
	 * vertical active integer multiple and maximum vertical slice count
	 * requirements are met.
	 */
	for (slice_height = 108; slice_height <= vactive; slice_height += 2)
		if (vactive % slice_height == 0)
			return slice_height;

	/*
	 * Highly unlikely we reach here as most of the resolutions will end up
	 * finding appropriate slice_height in above loop but returning
	 * slice_height as 2 here as it should work with all resolutions.
	 */
	return 2;
}

/* Fill crtc_state->dsc.config from sink DPCD caps and compute RC parameters */
static int intel_dp_dsc_compute_params(const struct intel_connector *connector,
				       struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
	int ret;

	/*
	 * RC_MODEL_SIZE is currently a constant across all configurations.
	 *
	 * FIXME: Look into using sink defined DPCD DP_DSC_RC_BUF_BLK_SIZE and
	 * DP_DSC_RC_BUF_SIZE for this.
	 */
	vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST;
	vdsc_cfg->pic_height = crtc_state->hw.adjusted_mode.crtc_vdisplay;

	vdsc_cfg->slice_height = intel_dp_get_slice_height(vdsc_cfg->pic_height);

	ret = intel_dsc_compute_params(crtc_state);
	if (ret)
		return ret;

	/* DSC version: major from the sink, minor capped to source support */
	vdsc_cfg->dsc_version_major =
		(connector->dp.dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] &
		 DP_DSC_MAJOR_MASK) >> DP_DSC_MAJOR_SHIFT;
	vdsc_cfg->dsc_version_minor =
		min(intel_dp_source_dsc_version_minor(display),
		    intel_dp_sink_dsc_version_minor(connector->dp.dsc_dpcd));
	if (vdsc_cfg->convert_rgb)
		vdsc_cfg->convert_rgb =
			connector->dp.dsc_dpcd[DP_DSC_DEC_COLOR_FORMAT_CAP - DP_DSC_SUPPORT] &
			DP_DSC_RGB;

	vdsc_cfg->line_buf_depth = min(INTEL_DP_DSC_MAX_LINE_BUF_DEPTH,
				       drm_dp_dsc_sink_line_buf_depth(connector->dp.dsc_dpcd));
	if (!vdsc_cfg->line_buf_depth) {
		drm_dbg_kms(display->drm,
			    "DSC Sink Line Buffer Depth invalid\n");
		return -EINVAL;
	}

	vdsc_cfg->block_pred_enable =
		connector->dp.dsc_dpcd[DP_DSC_BLK_PREDICTION_SUPPORT - DP_DSC_SUPPORT] &
		DP_DSC_BLK_PREDICTION_IS_SUPPORTED;

	return drm_dsc_compute_rc_parameters(vdsc_cfg);
}

/* Does the sink's DSC decoder support compressing the given output format? */
static bool intel_dp_dsc_supports_format(const struct intel_connector *connector,
					 enum intel_output_format output_format)
{
	struct intel_display *display = to_intel_display(connector);
	u8 sink_dsc_format;

	switch (output_format) {
	case INTEL_OUTPUT_FORMAT_RGB:
		sink_dsc_format = DP_DSC_RGB;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		sink_dsc_format = DP_DSC_YCbCr444;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		/* Native 4:2:0 needs DSC minor version >= 2 on both source and sink */
		if (min(intel_dp_source_dsc_version_minor(display),
			intel_dp_sink_dsc_version_minor(connector->dp.dsc_dpcd)) < 2)
			return false;
		sink_dsc_format = DP_DSC_YCbCr420_Native;
		break;
	default:
		return false;
	}

	return drm_dp_dsc_sink_supports_format(connector->dp.dsc_dpcd, sink_dsc_format);
}

/* Does the link BW at this clock/lane count cover the mode at @link_bpp_x16? */
static bool is_bw_sufficient_for_dsc_config(struct intel_dp *intel_dp,
					    int link_clock, int lane_count,
					    int mode_clock, int mode_hdisplay,
					    int dsc_slice_count, int link_bpp_x16,
					    unsigned long bw_overhead_flags)
{
	int available_bw;
	int required_bw;

	available_bw = intel_dp_max_link_data_rate(intel_dp, link_clock, lane_count);
	required_bw = intel_dp_link_required(link_clock, lane_count,
					     mode_clock, mode_hdisplay,
					     link_bpp_x16, bw_overhead_flags);

	return available_bw >= required_bw;
}

/*
 * Find the lowest link rate / lane count combination that can carry the mode
 * at the given compressed bpp; fills lane_count/port_clock in @pipe_config.
 */
static int dsc_compute_link_config(struct intel_dp *intel_dp,
				   struct intel_crtc_state *pipe_config,
				   struct drm_connector_state *conn_state,
				   const struct link_config_limits *limits,
				   int dsc_bpp_x16)
{
	const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	int link_rate, lane_count;
	int i;

	for (i = 0; i < intel_dp->num_common_rates; i++) {
		link_rate = intel_dp_common_rate(intel_dp, i);
		if (link_rate < limits->min_rate || link_rate > limits->max_rate)
			continue;

		for (lane_count = limits->min_lane_count;
		     lane_count <= limits->max_lane_count;
		     lane_count <<= 1) {

			/*
			 * FIXME: intel_dp_mtp_tu_compute_config() requires
			 * ->lane_count and ->port_clock set before we know
			 * they'll work. If we end up failing altogether,
			 * they'll remain in crtc state. This shouldn't matter,
			 * as we'd then bail out from compute config, but it's
			 * just ugly.
			 */
			pipe_config->lane_count = lane_count;
			pipe_config->port_clock = link_rate;

			if (drm_dp_is_uhbr_rate(link_rate)) {
				int ret;

				ret = intel_dp_mtp_tu_compute_config(intel_dp,
								     pipe_config,
								     conn_state,
								     dsc_bpp_x16,
								     dsc_bpp_x16,
								     0, true);
				if (ret)
					continue;
			} else {
				unsigned long bw_overhead_flags =
					pipe_config->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0;

				if (!is_bw_sufficient_for_dsc_config(intel_dp,
								     link_rate, lane_count,
								     adjusted_mode->crtc_clock,
								     adjusted_mode->hdisplay,
								     pipe_config->dsc.slice_count,
								     dsc_bpp_x16,
								     bw_overhead_flags))
					continue;
			}

			return 0;
		}
	}

	return -EINVAL;
}

/*
 * Max compressed bpp (.4 fixed point) the sink supports: DPCD value if set,
 * otherwise the spec's mandatory values for the format.
 */
static
u16 intel_dp_dsc_max_sink_compressed_bppx16(const struct intel_connector *connector,
					    enum intel_output_format output_format,
					    int bpc)
{
	u16 max_bppx16 = drm_edp_dsc_sink_output_bpp(connector->dp.dsc_dpcd);

	if (max_bppx16)
		return max_bppx16;
	/*
	 * If support not given in DPCD 67h, 68h use the Maximum Allowed bit rate
	 * values as given in spec Table 2-157 DP v2.0
	 */
	switch (output_format) {
	case INTEL_OUTPUT_FORMAT_RGB:
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		return (3 * bpc) << 4;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		return (3 * (bpc / 2)) << 4;
	default:
		MISSING_CASE(output_format);
		break;
	}

	return 0;
}

static int intel_dp_dsc_sink_min_compressed_bpp(enum intel_output_format output_format)
{
	/* From Mandatory bit rate range Support Table 2-157 (DP v2.0) */
	switch (output_format) {
	case INTEL_OUTPUT_FORMAT_RGB:
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		return 8;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		return 6;
	default:
		MISSING_CASE(output_format);
		break;
	}

	return 0;
}

static int
intel_dp_dsc_sink_max_compressed_bpp(const struct intel_connector *connector,
				     enum intel_output_format output_format,
				     int bpc)
{
	/* Convert the sink's fxp q4 limit to an integer bpp value. */
	return intel_dp_dsc_max_sink_compressed_bppx16(connector,
						       output_format, bpc) >> 4;
}

int intel_dp_dsc_min_src_compressed_bpp(void)
{
	/* Min Compressed bpp supported by source is 8 */
	return 8;
}

/* Maximum compressed bpp the source (platform) can produce. */
static int dsc_src_max_compressed_bpp(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Forcing DSC and using the platform's max compressed bpp is seen to cause
	 * underruns. Since DSC isn't needed in these cases, limit the
	 * max compressed bpp to 18, which is a safe value across platforms with different
	 * pipe bpps.
	 */
	if (intel_dp->force_dsc_en)
		return 18;

	/*
	 * Max Compressed bpp for Gen 13+ is 27bpp.
	 * For earlier platform is 23bpp. (Bspec:49259).
	 */
	if (DISPLAY_VER(display) < 13)
		return 23;
	else
		return 27;
}

/*
 * Return the granularity of valid compressed bpp values in fxp q4 format.
 * Note: for pre-13 display you still need to check the validity of each step.
 */
int intel_dp_dsc_bpp_step_x16(const struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	u8 incr = drm_dp_dsc_sink_bpp_incr(connector->dp.dsc_dpcd);

	/* No fractional bpp support/advertisement: step in whole bpp. */
	if (DISPLAY_VER(display) < 14 || !incr)
		return fxp_q4_from_int(1);

	/* On MST use fractional bpp only if forced via bpp or debugfs. */
	if (connector->mst.dp &&
	    !connector->link.force_bpp_x16 && !connector->mst.dp->force_dsc_fractional_bpp_en)
		return fxp_q4_from_int(1);

	/* fxp q4 */
	return fxp_q4_from_int(1) / incr;
}

/*
 * Note: for bpp_x16 to be valid it must be also within the source/sink's
 * min..max bpp capability range.
 */
bool intel_dp_dsc_valid_compressed_bpp(struct intel_dp *intel_dp, int bpp_x16)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (DISPLAY_VER(display) >= 13) {
		/* When forcing fractional bpp, reject integer-only values. */
		if (intel_dp->force_dsc_fractional_bpp_en && !fxp_q4_to_frac(bpp_x16))
			return false;

		return true;
	}

	/* Pre-13 platforms support only integer compressed bpp values. */
	if (fxp_q4_to_frac(bpp_x16))
		return false;

	return align_max_vesa_compressed_bpp_x16(bpp_x16) == bpp_x16;
}

/* Round min_bpp_x16 up to the platform's valid compressed bpp granularity. */
static int align_min_compressed_bpp_x16(const struct intel_connector *connector, int min_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);

	if (DISPLAY_VER(display) >= 13) {
		int bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

		drm_WARN_ON(display->drm, !is_power_of_2(bpp_step_x16));

		return round_up(min_bpp_x16, bpp_step_x16);
	} else {
		return align_min_vesa_compressed_bpp_x16(min_bpp_x16);
	}
}

/*
 * Round max_bpp_x16 down to the platform's valid compressed bpp
 * granularity, also keeping it at least one step below the uncompressed
 * link bpp for the given output format / pipe bpp.
 */
static int align_max_compressed_bpp_x16(const struct intel_connector *connector,
					enum intel_output_format output_format,
					int pipe_bpp, int max_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);
	int link_bpp_x16 = intel_dp_output_format_link_bpp_x16(output_format, pipe_bpp);
	int bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

	max_bpp_x16 = min(max_bpp_x16, link_bpp_x16 - bpp_step_x16);

	if (DISPLAY_VER(display) >= 13) {
		drm_WARN_ON(display->drm, !is_power_of_2(bpp_step_x16));

		return round_down(max_bpp_x16, bpp_step_x16);
	} else {
		return align_max_vesa_compressed_bpp_x16(max_bpp_x16);
	}
}

/*
 * Find the max compressed BPP we can find a link configuration for. The BPPs to
 * try depend on the source (platform) and sink.
*/
static int dsc_compute_compressed_bpp(struct intel_dp *intel_dp,
				      struct intel_crtc_state *pipe_config,
				      struct drm_connector_state *conn_state,
				      const struct link_config_limits *limits,
				      int pipe_bpp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct intel_connector *connector = to_intel_connector(conn_state->connector);
	int min_bpp_x16, max_bpp_x16, bpp_step_x16;
	int bpp_x16;
	int ret;

	min_bpp_x16 = limits->link.min_bpp_x16;
	max_bpp_x16 = limits->link.max_bpp_x16;
	bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

	max_bpp_x16 = align_max_compressed_bpp_x16(connector, pipe_config->output_format,
						   pipe_bpp, max_bpp_x16);
	/* eDP: use the max link parameters and max bpp without searching. */
	if (intel_dp_is_edp(intel_dp)) {
		pipe_config->port_clock = limits->max_rate;
		pipe_config->lane_count = limits->max_lane_count;

		pipe_config->dsc.compressed_bpp_x16 = max_bpp_x16;

		return 0;
	}

	/* Try bpp values from high to low, in bpp_step_x16 decrements. */
	for (bpp_x16 = max_bpp_x16; bpp_x16 >= min_bpp_x16; bpp_x16 -= bpp_step_x16) {
		if (!intel_dp_dsc_valid_compressed_bpp(intel_dp, bpp_x16))
			continue;

		ret = dsc_compute_link_config(intel_dp,
					      pipe_config,
					      conn_state,
					      limits,
					      bpp_x16);
		if (ret == 0) {
			pipe_config->dsc.compressed_bpp_x16 = bpp_x16;
			if (intel_dp->force_dsc_fractional_bpp_en &&
			    fxp_q4_to_frac(bpp_x16))
				drm_dbg_kms(display->drm,
					    "Forcing DSC fractional bpp\n");

			return 0;
		}
	}

	return -EINVAL;
}

int intel_dp_dsc_min_src_input_bpc(void)
{
	/* Min DSC Input BPC for ICL+ is 8 */
	return 8;
}

/* Check that pipe_bpp is within the pipe bpp limits. */
static
bool is_dsc_pipe_bpp_sufficient(const struct link_config_limits *limits,
				int pipe_bpp)
{
	return pipe_bpp >= limits->pipe.min_bpp &&
	       pipe_bpp <= limits->pipe.max_bpp;
}

/*
 * Return the debugfs-forced DSC input bpp if it is set and within the
 * limits, otherwise 0.
 */
static
int intel_dp_force_dsc_pipe_bpp(struct intel_dp *intel_dp,
				const struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int forced_bpp;

	if (!intel_dp->force_dsc_bpc)
		return 0;

	/* 3 components per pixel. */
	forced_bpp = intel_dp->force_dsc_bpc * 3;

	if (is_dsc_pipe_bpp_sufficient(limits, forced_bpp)) {
		drm_dbg_kms(display->drm, "Input DSC BPC forced to %d\n",
			    intel_dp->force_dsc_bpc);
		return forced_bpp;
	}

	drm_dbg_kms(display->drm,
		    "Cannot force DSC BPC:%d, due to DSC BPC limits\n",
		    intel_dp->force_dsc_bpc);

	return 0;
}

/*
 * Compute the DSC input (pipe) bpp and, via the compressed bpp search,
 * the link configuration. Returns 0 on success, -EINVAL otherwise.
 */
static int intel_dp_dsc_compute_pipe_bpp(struct intel_dp *intel_dp,
					 struct intel_crtc_state *pipe_config,
					 struct drm_connector_state *conn_state,
					 const struct link_config_limits *limits)
{
	int forced_bpp, pipe_bpp;
	int ret;

	forced_bpp = intel_dp_force_dsc_pipe_bpp(intel_dp, limits);
	if (forced_bpp)
		pipe_bpp = forced_bpp;
	else
		pipe_bpp = limits->pipe.max_bpp;

	ret = dsc_compute_compressed_bpp(intel_dp, pipe_config, conn_state,
					 limits, pipe_bpp);
	if (ret)
		return -EINVAL;

	pipe_config->pipe_bpp = pipe_bpp;

	return 0;
}

/*
 * Return whether FEC must be enabled for 8b10b SST or MST links. On 128b132b
 * links FEC is always enabled implicitly by the HW, so this function returns
 * false for that case.
 */
bool intel_dp_needs_8b10b_fec(const struct intel_crtc_state *crtc_state,
			      bool dsc_enabled_on_crtc)
{
	if (intel_dp_is_uhbr(crtc_state))
		return false;

	/*
	 * Though eDP v1.5 supports FEC with DSC, unlike DP, it is optional.
	 * Since, FEC is a bandwidth overhead, continue to not enable it for
	 * eDP. Until, there is a good reason to do so.
*/
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return false;

	return dsc_enabled_on_crtc || intel_dsc_enabled_on_link(crtc_state);
}

/*
 * Compute the DSC state for the CRTC: FEC, pipe bpp, compressed bpp,
 * slice count, number of VDSC engines and the DSC config parameters.
 * Returns 0 on success or a negative error code.
 */
int intel_dp_dsc_compute_config(struct intel_dp *intel_dp,
				struct intel_crtc_state *pipe_config,
				struct drm_connector_state *conn_state,
				const struct link_config_limits *limits,
				int timeslots)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	int num_joined_pipes = intel_crtc_num_joined_pipes(pipe_config);
	bool is_mst = intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DP_MST);
	int ret;

	/*
	 * FIXME: set the FEC enabled state once pipe_config->port_clock is
	 * already known, so the UHBR/non-UHBR mode can be determined.
	 */
	pipe_config->fec_enable = intel_dp_needs_8b10b_fec(pipe_config, true);

	if (!intel_dp_dsc_supports_format(connector, pipe_config->output_format))
		return -EINVAL;

	/*
	 * Link parameters, pipe bpp and compressed bpp have already been
	 * figured out for DP MST DSC.
	 */
	if (!is_mst) {
		ret = intel_dp_dsc_compute_pipe_bpp(intel_dp, pipe_config,
						    conn_state, limits);
		if (ret) {
			drm_dbg_kms(display->drm,
				    "No Valid pipe bpp for given mode ret = %d\n", ret);
			return ret;
		}
	}

	/* Calculate Slice count */
	if (intel_dp_is_edp(intel_dp)) {
		pipe_config->dsc.slice_count =
			drm_dp_dsc_sink_max_slice_count(connector->dp.dsc_dpcd,
							true);
		if (!pipe_config->dsc.slice_count) {
			drm_dbg_kms(display->drm,
				    "Unsupported Slice Count %d\n",
				    pipe_config->dsc.slice_count);
			return -EINVAL;
		}
	} else {
		u8 dsc_dp_slice_count;

		dsc_dp_slice_count =
			intel_dp_dsc_get_slice_count(connector,
						     adjusted_mode->crtc_clock,
						     adjusted_mode->crtc_hdisplay,
						     num_joined_pipes);
		if (!dsc_dp_slice_count) {
			drm_dbg_kms(display->drm,
				    "Compressed Slice Count not supported\n");
			return -EINVAL;
		}

		pipe_config->dsc.slice_count = dsc_dp_slice_count;
	}
	/*
	 * VDSC engine operates at 1 Pixel per clock, so if peak pixel rate
	 * is greater than the maximum Cdclock and if slice count is even
	 * then we need to use 2 VDSC instances.
	 * In case of Ultrajoiner along with 12 slices we need to use 3
	 * VDSC instances.
	 */
	if (pipe_config->joiner_pipes && num_joined_pipes == 4 &&
	    pipe_config->dsc.slice_count == 12)
		pipe_config->dsc.num_streams = 3;
	else if (pipe_config->joiner_pipes || pipe_config->dsc.slice_count > 1)
		pipe_config->dsc.num_streams = 2;
	else
		pipe_config->dsc.num_streams = 1;

	ret = intel_dp_dsc_compute_params(connector, pipe_config);
	if (ret < 0) {
		drm_dbg_kms(display->drm,
			    "Cannot compute valid DSC parameters for Input Bpp = %d"
			    "Compressed BPP = " FXP_Q4_FMT "\n",
			    pipe_config->pipe_bpp,
			    FXP_Q4_ARGS(pipe_config->dsc.compressed_bpp_x16));
		return ret;
	}

	intel_dsc_enable_on_crtc(pipe_config);

	drm_dbg_kms(display->drm, "DP DSC computed with Input Bpp = %d "
		    "Compressed Bpp = " FXP_Q4_FMT " Slice Count = %d\n",
		    pipe_config->pipe_bpp,
		    FXP_Q4_ARGS(pipe_config->dsc.compressed_bpp_x16),
		    pipe_config->dsc.slice_count);

	return 0;
}

/*
 * Return the maximum link bpp (fxp q4) allowed by the Synaptics Panamera
 * decompression throughput quirk, or INT_MAX if the quirk doesn't apply.
 */
static int
dsc_throughput_quirk_max_bpp_x16(const struct intel_connector *connector,
				 int mode_clock)
{
	if (!connector->dp.dsc_throughput_quirk)
		return INT_MAX;

	/*
	 * Synaptics Panamera branch devices have a problem decompressing a
	 * stream with a compressed link-bpp higher than 12, if the pixel
	 * clock is higher than ~50 % of the maximum overall throughput
	 * reported by the branch device. Work around this by limiting the
	 * maximum link bpp for such pixel clocks.
	 *
	 * TODO: Use the throughput value specific to the actual RGB/YUV
	 * format of the output, after determining the pixel clock limit for
	 * YUV modes. For now use the smaller of the throughput values, which
	 * may result in limiting the link-bpp value already at a lower than
	 * required mode clock in case of native YUV422/420 output formats.
* The RGB/YUV444 throughput value should be always either equal or
 * smaller than the YUV422/420 value, but let's not depend on this
 * assumption.
 */
	if (mode_clock <
	    min(connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444,
		connector->dp.dsc_branch_caps.overall_throughput.yuv422_420) / 2)
		return INT_MAX;

	return fxp_q4_from_int(12);
}

/*
 * Compute the minimum acceptable compressed bpp (fxp q4) for the given
 * output format, aligned to the platform's bpp granularity.
 */
static int compute_min_compressed_bpp_x16(struct intel_connector *connector,
					  enum intel_output_format output_format)
{
	int dsc_src_min_bpp, dsc_sink_min_bpp, dsc_min_bpp;
	int min_bpp_x16;

	dsc_src_min_bpp = intel_dp_dsc_min_src_compressed_bpp();
	dsc_sink_min_bpp = intel_dp_dsc_sink_min_compressed_bpp(output_format);
	dsc_min_bpp = max(dsc_src_min_bpp, dsc_sink_min_bpp);

	min_bpp_x16 = fxp_q4_from_int(dsc_min_bpp);

	min_bpp_x16 = align_min_compressed_bpp_x16(connector, min_bpp_x16);

	return min_bpp_x16;
}

/*
 * Compute the maximum acceptable compressed bpp (fxp q4), taking into
 * account the source, sink, joiner and branch-device throughput limits,
 * aligned to the platform's bpp granularity.
 */
static int compute_max_compressed_bpp_x16(struct intel_connector *connector,
					  int mode_clock, int mode_hdisplay,
					  int num_joined_pipes,
					  enum intel_output_format output_format,
					  int pipe_max_bpp, int max_link_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int dsc_src_max_bpp, dsc_sink_max_bpp, dsc_max_bpp;
	int throughput_max_bpp_x16;
	int joiner_max_bpp;

	dsc_src_max_bpp = dsc_src_max_compressed_bpp(intel_dp);
	joiner_max_bpp = get_max_compressed_bpp_with_joiner(display,
							    mode_clock,
							    mode_hdisplay,
							    num_joined_pipes);
	dsc_sink_max_bpp = intel_dp_dsc_sink_max_compressed_bpp(connector,
								output_format,
								pipe_max_bpp / 3);
	dsc_max_bpp = min(dsc_sink_max_bpp, dsc_src_max_bpp);
	dsc_max_bpp = min(dsc_max_bpp, joiner_max_bpp);

	max_link_bpp_x16 = min(max_link_bpp_x16, fxp_q4_from_int(dsc_max_bpp));

	throughput_max_bpp_x16 = dsc_throughput_quirk_max_bpp_x16(connector,
								  mode_clock);
	if (throughput_max_bpp_x16 < max_link_bpp_x16) {
		max_link_bpp_x16 = throughput_max_bpp_x16;

		drm_dbg_kms(display->drm,
			    "[CONNECTOR:%d:%s] Decreasing link max bpp to " FXP_Q4_FMT " due to DSC throughput quirk\n",
			    connector->base.base.id, connector->base.name,
			    FXP_Q4_ARGS(max_link_bpp_x16));
	}

	max_link_bpp_x16 = align_max_compressed_bpp_x16(connector, output_format,
							pipe_max_bpp, max_link_bpp_x16);

	return max_link_bpp_x16;
}

/*
 * Check whether the mode can be supported using DSC at the minimum valid
 * compressed bpp with the given link parameters.
 */
bool intel_dp_mode_valid_with_dsc(struct intel_connector *connector,
				  int link_clock, int lane_count,
				  int mode_clock, int mode_hdisplay,
				  int num_joined_pipes,
				  enum intel_output_format output_format,
				  int pipe_bpp, unsigned long bw_overhead_flags)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int min_bpp_x16 = compute_min_compressed_bpp_x16(connector, output_format);
	int max_bpp_x16 = compute_max_compressed_bpp_x16(connector,
							 mode_clock, mode_hdisplay,
							 num_joined_pipes,
							 output_format,
							 pipe_bpp, INT_MAX);
	int dsc_slice_count = intel_dp_dsc_get_slice_count(connector,
							   mode_clock,
							   mode_hdisplay,
							   num_joined_pipes);

	if (min_bpp_x16 <= 0 || min_bpp_x16 > max_bpp_x16)
		return false;

	return is_bw_sufficient_for_dsc_config(intel_dp,
					       link_clock, lane_count,
					       mode_clock, mode_hdisplay,
					       dsc_slice_count, min_bpp_x16,
					       bw_overhead_flags);
}

/*
 * Calculate the output link min, max bpp values in limits based on the pipe bpp
 * range, crtc_state and dsc mode. Return true on success.
*/
static bool
intel_dp_compute_config_link_bpp_limits(struct intel_connector *connector,
					const struct intel_crtc_state *crtc_state,
					bool dsc,
					struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	const struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int max_link_bpp_x16;

	max_link_bpp_x16 = min(crtc_state->max_link_bpp_x16,
			       fxp_q4_from_int(limits->pipe.max_bpp));

	if (!dsc) {
		/* Uncompressed link bpp is a multiple of 2 bpc * 3 components. */
		max_link_bpp_x16 = rounddown(max_link_bpp_x16, fxp_q4_from_int(2 * 3));

		if (max_link_bpp_x16 < fxp_q4_from_int(limits->pipe.min_bpp))
			return false;

		limits->link.min_bpp_x16 = fxp_q4_from_int(limits->pipe.min_bpp);
	} else {
		limits->link.min_bpp_x16 =
			compute_min_compressed_bpp_x16(connector, crtc_state->output_format);

		max_link_bpp_x16 =
			compute_max_compressed_bpp_x16(connector,
						       adjusted_mode->crtc_clock,
						       adjusted_mode->hdisplay,
						       intel_crtc_num_joined_pipes(crtc_state),
						       crtc_state->output_format,
						       limits->pipe.max_bpp,
						       max_link_bpp_x16);
	}

	limits->link.max_bpp_x16 = max_link_bpp_x16;

	drm_dbg_kms(display->drm,
		    "[ENCODER:%d:%s][CRTC:%d:%s] DP link limits: pixel clock %d kHz DSC %s max lanes %d max rate %d max pipe_bpp %d min link_bpp " FXP_Q4_FMT " max link_bpp " FXP_Q4_FMT "\n",
		    encoder->base.base.id, encoder->base.name,
		    crtc->base.base.id, crtc->base.name,
		    adjusted_mode->crtc_clock,
		    str_on_off(dsc),
		    limits->max_lane_count,
		    limits->max_rate,
		    limits->pipe.max_bpp,
		    FXP_Q4_ARGS(limits->link.min_bpp_x16),
		    FXP_Q4_ARGS(limits->link.max_bpp_x16));

	if (limits->link.min_bpp_x16 <= 0 ||
	    limits->link.min_bpp_x16 > limits->link.max_bpp_x16)
		return false;

	return true;
}

/*
 * Clamp the pipe bpp limits to the DSC source/sink input bpp
 * capabilities. Returns false if the resulting range is empty.
 */
static bool
intel_dp_dsc_compute_pipe_bpp_limits(struct intel_connector *connector,
				     struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(connector);
	const struct link_config_limits orig_limits = *limits;
	int dsc_min_bpc = intel_dp_dsc_min_src_input_bpc();
	int dsc_max_bpc = intel_dp_dsc_max_src_input_bpc(display);

	limits->pipe.min_bpp = max(limits->pipe.min_bpp, dsc_min_bpc * 3);
	limits->pipe.min_bpp = align_min_sink_dsc_input_bpp(connector, limits->pipe.min_bpp);

	limits->pipe.max_bpp = min(limits->pipe.max_bpp, dsc_max_bpc * 3);
	limits->pipe.max_bpp = align_max_sink_dsc_input_bpp(connector, limits->pipe.max_bpp);

	if (limits->pipe.min_bpp <= 0 ||
	    limits->pipe.min_bpp > limits->pipe.max_bpp) {
		drm_dbg_kms(display->drm,
			    "[CONNECTOR:%d:%s] Invalid DSC src/sink input BPP (src:%d-%d pipe:%d-%d sink-align:%d-%d)\n",
			    connector->base.base.id, connector->base.name,
			    dsc_min_bpc * 3, dsc_max_bpc * 3,
			    orig_limits.pipe.min_bpp, orig_limits.pipe.max_bpp,
			    limits->pipe.min_bpp, limits->pipe.max_bpp);

		return false;
	}

	return true;
}

/*
 * Compute the link rate, lane count, pipe bpp and link bpp limits used
 * by the link config computation. Returns false if no valid limits
 * exist for the given mode/state.
 */
bool
intel_dp_compute_config_limits(struct intel_dp *intel_dp,
			       struct drm_connector_state *conn_state,
			       struct intel_crtc_state *crtc_state,
			       bool respect_downstream_limits,
			       bool dsc,
			       struct link_config_limits *limits)
{
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);

	limits->min_rate = intel_dp_min_link_rate(intel_dp);
	limits->max_rate = intel_dp_max_link_rate(intel_dp);

	limits->min_rate = min(limits->min_rate, limits->max_rate);

	limits->min_lane_count = intel_dp_min_lane_count(intel_dp);
	limits->max_lane_count = intel_dp_max_lane_count(intel_dp);

	/* HDR (ST 2084) modes require at least 10 bpc, i.e. 30 bpp. */
	limits->pipe.min_bpp = intel_dp_in_hdr_mode(conn_state) ? 30 :
			       intel_dp_min_bpp(crtc_state->output_format);
	if (is_mst) {
		/*
		 * FIXME: If all the streams can't fit into the link with their
		 * current pipe_bpp we should reduce pipe_bpp across the board
		 * until things start to fit. Until then we limit to <= 8bpc
		 * since that's what was hardcoded for all MST streams
		 * previously. This hack should be removed once we have the
		 * proper retry logic in place.
		 */
		limits->pipe.max_bpp = min(crtc_state->pipe_bpp, 24);
	} else {
		limits->pipe.max_bpp = intel_dp_max_bpp(intel_dp, crtc_state,
							respect_downstream_limits);
	}

	if (dsc && !intel_dp_dsc_compute_pipe_bpp_limits(connector, limits))
		return false;

	if (is_mst || intel_dp->use_max_params) {
		/*
		 * For MST we always configure max link bw - the spec doesn't
		 * seem to suggest we should do otherwise.
		 *
		 * Use the maximum clock and number of lanes the eDP panel
		 * advertizes being capable of in case the initial fast
		 * optimal params failed us. The panels are generally
		 * designed to support only a single clock and lane
		 * configuration, and typically on older panels these
		 * values correspond to the native resolution of the panel.
		 */
		limits->min_lane_count = limits->max_lane_count;
		limits->min_rate = limits->max_rate;
	}

	intel_dp_test_compute_config(intel_dp, crtc_state, limits);

	return intel_dp_compute_config_link_bpp_limits(connector,
						       crtc_state,
						       dsc,
						       limits);
}

/* Return the link data rate required by the given crtc state. */
int intel_dp_config_required_rate(const struct intel_crtc_state *crtc_state)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int link_bpp_x16 = crtc_state->dsc.compression_enable ?
crtc_state->dsc.compressed_bpp_x16 :
		fxp_q4_from_int(crtc_state->pipe_bpp);

	return intel_dp_link_required(crtc_state->port_clock, crtc_state->lane_count,
				      adjusted_mode->crtc_clock, adjusted_mode->hdisplay,
				      link_bpp_x16, 0);
}

/* Return whether joining num_joined_pipes pipes requires compression. */
bool intel_dp_joiner_needs_dsc(struct intel_display *display,
			       int num_joined_pipes)
{
	/*
	 * Pipe joiner needs compression up to display 12 due to bandwidth
	 * limitation. DG2 onwards pipe joiner can be enabled without
	 * compression.
	 * Ultrajoiner always needs compression.
	 */
	return (!HAS_UNCOMPRESSED_JOINER(display) && num_joined_pipes == 2) ||
	       num_joined_pipes == 4;
}

/*
 * Compute the link configuration (rate, lane count, bpp), first without
 * DSC and falling back to DSC if needed or forced. Returns 0 on success
 * or a negative error code.
 */
static int
intel_dp_compute_link_config(struct intel_encoder *encoder,
			     struct intel_crtc_state *pipe_config,
			     struct drm_connector_state *conn_state,
			     bool respect_downstream_limits)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct link_config_limits limits;
	bool dsc_needed, joiner_needs_dsc;
	int num_joined_pipes;
	int ret = 0;

	if (pipe_config->fec_enable &&
	    !intel_dp_supports_fec(intel_dp, connector, pipe_config))
		return -EINVAL;

	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     adjusted_mode->crtc_hdisplay,
						     adjusted_mode->crtc_clock);
	if (num_joined_pipes > 1)
		pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);

	joiner_needs_dsc = intel_dp_joiner_needs_dsc(display, num_joined_pipes);

	/* DSC is needed when forced, required by the joiner, or no uncompressed limits fit. */
	dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
		     !intel_dp_compute_config_limits(intel_dp, conn_state, pipe_config,
						     respect_downstream_limits,
						     false,
						     &limits);

	if (!dsc_needed) {
		/*
		 * Optimize for slow and wide for everything, because there are some
		 * eDP 1.3 and 1.4 panels don't work well with fast and narrow.
		 */
		ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config,
							conn_state, &limits);
		if (!ret && intel_dp_is_uhbr(pipe_config))
			ret = intel_dp_mtp_tu_compute_config(intel_dp,
							     pipe_config,
							     conn_state,
							     fxp_q4_from_int(pipe_config->pipe_bpp),
							     fxp_q4_from_int(pipe_config->pipe_bpp),
							     0, false);
		if (ret)
			dsc_needed = true;
	}

	if (dsc_needed && !intel_dp_supports_dsc(intel_dp, connector, pipe_config)) {
		drm_dbg_kms(display->drm, "DSC required but not available\n");
		return -EINVAL;
	}

	if (dsc_needed) {
		drm_dbg_kms(display->drm,
			    "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
			    str_yes_no(intel_dp->force_dsc_en));

		/* Recompute the limits, this time for the DSC case. */
		if (!intel_dp_compute_config_limits(intel_dp, conn_state, pipe_config,
						    respect_downstream_limits,
						    true,
						    &limits))
			return -EINVAL;

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits, 64);
		if (ret < 0)
			return ret;
	}

	drm_dbg_kms(display->drm,
		    "DP lane count %d clock %d bpp input %d compressed " FXP_Q4_FMT " link rate required %d available %d\n",
		    pipe_config->lane_count, pipe_config->port_clock,
		    pipe_config->pipe_bpp,
		    FXP_Q4_ARGS(pipe_config->dsc.compressed_bpp_x16),
		    intel_dp_config_required_rate(pipe_config),
		    intel_dp_max_link_data_rate(intel_dp,
						pipe_config->port_clock,
						pipe_config->lane_count));

	return 0;
}

/* Return whether RGB limited (CTA) quantization range should be used. */
bool intel_dp_limited_color_range(const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	const struct intel_digital_connector_state
*intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/*
	 * Our YCbCr output is always limited range.
	 * crtc_state->limited_color_range only applies to RGB,
	 * and it must never be set for YCbCr or we risk setting
	 * some conflicting bits in TRANSCONF which will mess up
	 * the colors on the monitor.
	 */
	if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
		return false;

	if (intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_AUTO) {
		/*
		 * See:
		 * CEA-861-E - 5.1 Default Encoding Parameters
		 * VESA DisplayPort Ver.1.2a - 5.1.1.1 Video Colorimetry
		 */
		return crtc_state->pipe_bpp != 18 &&
		       drm_default_rgb_quant_range(adjusted_mode) ==
		       HDMI_QUANTIZATION_RANGE_LIMITED;
	} else {
		return intel_conn_state->broadcast_rgb ==
		       INTEL_BROADCAST_RGB_LIMITED;
	}
}

/* Return whether the given port supports audio on this platform. */
static bool intel_dp_port_has_audio(struct intel_display *display, enum port port)
{
	if (display->platform.g4x)
		return false;
	if (DISPLAY_VER(display) < 12 && port == PORT_A)
		return false;

	return true;
}

/*
 * Fill out the VSC SDP colorimetry fields (revision, length, pixel
 * format, colorimetry, bpc, dynamic range) from the crtc and connector
 * state.
 */
static void intel_dp_compute_vsc_colorimetry(const struct intel_crtc_state *crtc_state,
					     const struct drm_connector_state *conn_state,
					     struct drm_dp_vsc_sdp *vsc)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (crtc_state->has_panel_replay) {
		/*
		 * Prepare VSC Header for SU as per DP 2.0 spec, Table 2-223
		 * VSC SDP supporting 3D stereo, Panel Replay, and Pixel
		 * Encoding/Colorimetry Format indication.
		 */
		vsc->revision = 0x7;
	} else {
		/*
		 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
		 * VSC SDP supporting 3D stereo, PSR2, and Pixel Encoding/
		 * Colorimetry Format indication.
		 */
		vsc->revision = 0x5;
	}

	vsc->length = 0x13;

	/* DP 1.4a spec, Table 2-120 */
	switch (crtc_state->output_format) {
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		vsc->pixelformat = DP_PIXELFORMAT_YUV444;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		vsc->pixelformat = DP_PIXELFORMAT_YUV420;
		break;
	case INTEL_OUTPUT_FORMAT_RGB:
	default:
		vsc->pixelformat = DP_PIXELFORMAT_RGB;
	}

	switch (conn_state->colorspace) {
	case DRM_MODE_COLORIMETRY_BT709_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_709:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_709;
		break;
	case DRM_MODE_COLORIMETRY_SYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_SYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_OPYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_OPYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_CYCC;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_RGB:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_RGB;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_YCC;
		break;
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_D65:
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_THEATER:
		vsc->colorimetry = DP_COLORIMETRY_DCI_P3_RGB;
		break;
	default:
		/*
		 * RGB->YCBCR color conversion uses the BT.709
		 * color space.
		 */
		if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
			vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		else
			vsc->colorimetry = DP_COLORIMETRY_DEFAULT;
		break;
	}

	vsc->bpc = crtc_state->pipe_bpp / 3;

	/* only RGB pixelformat supports 6 bpc */
	drm_WARN_ON(display->drm,
		    vsc->bpc == 6 && vsc->pixelformat != DP_PIXELFORMAT_RGB);

	/* all YCbCr are always limited range */
	vsc->dynamic_range = DP_DYNAMIC_RANGE_CTA;
	vsc->content_type = DP_CONTENT_TYPE_NOT_DEFINED;
}

/* Compute the Adaptive Sync SDP when VRR is enabled and the sink supports it. */
static void intel_dp_compute_as_sdp(struct intel_dp *intel_dp,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_dp_as_sdp *as_sdp = &crtc_state->infoframes.as_sdp;
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	if (!crtc_state->vrr.enable || !intel_dp->as_sdp_supported)
		return;

	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_ADAPTIVE_SYNC);

	as_sdp->sdp_type = DP_SDP_ADAPTIVE_SYNC;
	as_sdp->length = 0x9;
	as_sdp->duration_incr_ms = 0;
	as_sdp->vtotal = intel_vrr_vmin_vtotal(crtc_state);

	if (crtc_state->cmrr.enable) {
		as_sdp->mode = DP_AS_SDP_FAVT_TRR_REACHED;
		as_sdp->target_rr = drm_mode_vrefresh(adjusted_mode);
		as_sdp->target_rr_divider = true;
	} else {
		as_sdp->mode = DP_AS_SDP_AVT_DYNAMIC_VTOTAL;
		as_sdp->target_rr = 0;
	}
}

/* Compute the VSC SDP when colorimetry info or PSR/Panel Replay requires it. */
static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
				     struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	struct drm_dp_vsc_sdp *vsc;

	if ((!intel_dp->colorimetry_support ||
	     !intel_dp_needs_vsc_sdp(crtc_state, conn_state)) &&
	    !crtc_state->has_psr)
		return;

	vsc = &crtc_state->infoframes.vsc;

	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
	vsc->sdp_type = DP_SDP_VSC;

/* Needs colorimetry */
	if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) {
		intel_dp_compute_vsc_colorimetry(crtc_state, conn_state,
						 vsc);
	} else if (crtc_state->has_panel_replay) {
		/*
		 * [Panel Replay without colorimetry info]
		 * Prepare VSC Header for SU as per DP 2.0 spec, Table 2-223
		 * VSC SDP supporting 3D stereo + Panel Replay.
		 */
		vsc->revision = 0x6;
		vsc->length = 0x10;
	} else if (crtc_state->has_sel_update) {
		/*
		 * [PSR2 without colorimetry]
		 * Prepare VSC Header for SU as per eDP 1.4 spec, Table 6-11
		 * 3D stereo + PSR/PSR2 + Y-coordinate.
		 */
		vsc->revision = 0x4;
		vsc->length = 0xe;
	} else {
		/*
		 * [PSR1]
		 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
		 * VSC SDP supporting 3D stereo + PSR (applies to eDP v1.3 or
		 * higher).
		 */
		vsc->revision = 0x2;
		vsc->length = 0x8;
	}
}

/* Return whether the connector state requests an HDR (ST 2084 EOTF) mode. */
bool
intel_dp_in_hdr_mode(const struct drm_connector_state *conn_state)
{
	struct hdr_output_metadata *hdr_metadata;

	if (!conn_state->hdr_output_metadata)
		return false;

	hdr_metadata = conn_state->hdr_output_metadata->data;

	return hdr_metadata->hdmi_metadata_type1.eotf == HDMI_EOTF_SMPTE_ST2084;
}

/* Compute the HDR metadata infoframe SDP from the connector state. */
static void
intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp,
					    struct intel_crtc_state *crtc_state,
					    const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int ret;
	struct hdmi_drm_infoframe *drm_infoframe = &crtc_state->infoframes.drm.drm;

	if (!conn_state->hdr_output_metadata)
		return;

	ret = drm_hdmi_infoframe_set_hdr_metadata(drm_infoframe, conn_state);

	if (ret) {
		drm_dbg_kms(display->drm,
			    "couldn't set HDR metadata in infoframe\n");
		return;
	}

	crtc_state->infoframes.enable |=
		intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA);
}

/* Return whether seamless DRRS can be enabled for the given config. */
static bool can_enable_drrs(struct intel_connector *connector,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_display_mode *downclock_mode)
{
	struct intel_display *display = to_intel_display(connector);

	if (pipe_config->vrr.enable)
		return false;

	/*
	 * DRRS and PSR can't be enable together, so giving preference to PSR
	 * as it allows more power-savings by complete shutting down display,
	 * so to guarantee this, intel_drrs_compute_config() must be called
	 * after intel_psr_compute_config().
	 */
	if (pipe_config->has_psr)
		return false;

	/* FIXME missing FDI M2/N2 etc. */
	if (pipe_config->has_pch_encoder)
		return false;

	if (!intel_cpu_transcoder_has_drrs(display, pipe_config->cpu_transcoder))
		return false;

	return downclock_mode &&
	       intel_panel_drrs_type(connector) == DRRS_TYPE_SEAMLESS;
}

/* Compute the DRRS state: downclock M2/N2 values and seamless M/N updates. */
static void
intel_dp_drrs_compute_config(struct intel_connector *connector,
			     struct intel_crtc_state *pipe_config,
			     int link_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_mode *downclock_mode =
		intel_panel_downclock_mode(connector, &pipe_config->hw.adjusted_mode);
	int pixel_clock;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that when updating M/N live.
	 */
	if (has_seamless_m_n(connector) && !pipe_config->joiner_pipes)
		pipe_config->update_m_n = true;

	if (!can_enable_drrs(connector, pipe_config, downclock_mode)) {
		if (intel_cpu_transcoder_has_m2_n2(display, pipe_config->cpu_transcoder))
			intel_zero_m_n(&pipe_config->dp_m2_n2);
		return;
	}

	if (display->platform.ironlake || display->platform.sandybridge ||
	    display->platform.ivybridge)
		pipe_config->msa_timing_delay = connector->panel.vbt.edp.drrs_msa_timing_delay;

	pipe_config->has_drrs = true;

	pixel_clock = downclock_mode->clock;
	if (pipe_config->splitter.enable)
		pixel_clock /= pipe_config->splitter.link_count;

	intel_link_compute_m_n(link_bpp_x16, pipe_config->lane_count, pixel_clock,
			       pipe_config->port_clock,
			       intel_dp_bw_fec_overhead(pipe_config->fec_enable),
			       &pipe_config->dp_m2_n2);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m2_n2.data_m *= pipe_config->splitter.link_count;
}

/* Return whether audio should be enabled on this encoder/connector. */
static bool intel_dp_has_audio(struct intel_encoder *encoder,
			       const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	const struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);

	if (!intel_dp_port_has_audio(display, encoder->port))
		return false;

	if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
		return connector->base.display_info.has_audio;
	else
		return intel_conn_state->force_audio == HDMI_AUDIO_ON;
}

static int
intel_dp_compute_output_format(struct intel_encoder *encoder,
			       struct intel_crtc_state *crtc_state,
			       struct drm_connector_state *conn_state,
			       bool respect_downstream_limits)
{
	struct intel_display
*display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_display_info *info = &connector->base.display_info;
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	bool ycbcr_420_only;
	int ret;

	ycbcr_420_only = drm_mode_is_420_only(info, adjusted_mode);

	if (ycbcr_420_only && !connector->base.ycbcr_420_allowed) {
		/* Mode only works as 4:2:0 but we can't do 4:2:0: try RGB anyway */
		drm_dbg_kms(display->drm,
			    "YCbCr 4:2:0 mode but YCbCr 4:2:0 output not possible. Falling back to RGB.\n");
		crtc_state->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	} else {
		crtc_state->sink_format = intel_dp_sink_format(connector, adjusted_mode);
	}

	crtc_state->output_format = intel_dp_output_format(connector, crtc_state->sink_format);

	ret = intel_dp_compute_link_config(encoder, crtc_state, conn_state,
					   respect_downstream_limits);
	if (ret) {
		/*
		 * Retry with YCbCr 4:2:0 only when we weren't already using
		 * it, it's allowed, and the mode supports 4:2:0 as well.
		 */
		if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
		    !connector->base.ycbcr_420_allowed ||
		    !drm_mode_is_420_also(info, adjusted_mode))
			return ret;

		crtc_state->sink_format = INTEL_OUTPUT_FORMAT_YCBCR420;
		crtc_state->output_format = intel_dp_output_format(connector,
								   crtc_state->sink_format);
		ret = intel_dp_compute_link_config(encoder, crtc_state, conn_state,
						   respect_downstream_limits);
	}

	return ret;
}

/*
 * Compute the audio state: has_audio combines port/sink/user policy with the
 * generic audio config; SDP splitting is used together with audio on
 * 128b/132b (UHBR) links.
 */
void
intel_dp_audio_compute_config(struct intel_encoder *encoder,
			      struct intel_crtc_state *pipe_config,
			      struct drm_connector_state *conn_state)
{
	pipe_config->has_audio =
		intel_dp_has_audio(encoder, conn_state) &&
		intel_audio_compute_config(encoder, pipe_config, conn_state);

	pipe_config->sdp_split_enable = pipe_config->has_audio &&
					intel_dp_is_uhbr(pipe_config);
}

/*
 * Queue modeset-retry work for the link's connector(s) after a link failure.
 * For SST only the attached connector is queued; for MST every connector in
 * the atomic state that belongs to this intel_dp and has a crtc is queued.
 * Only the first call per link does anything (needs_modeset_retry latch).
 */
void
intel_dp_queue_modeset_retry_for_link(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_connector *connector;
	struct intel_digital_connector_state *conn_state;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int i;

	if (intel_dp->needs_modeset_retry)
		return;

	intel_dp->needs_modeset_retry = true;

	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_connector_queue_modeset_retry_work(intel_dp->attached_connector);

		return;
	}

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		if (!conn_state->base.crtc)
			continue;

		if (connector->mst.dp == intel_dp)
			intel_connector_queue_modeset_retry_work(connector);
	}
}

/*
 * Compute the MIN_HBLANK value (display version 30+ only) for 8b/10b MST and
 * 128b/132b SST/MST links, per the VESA minimum-Hblank calculation, and store
 * it in the crtc state. Returns 0 on success or -EINVAL if the DSC slice
 * count can't be determined.
 */
int intel_dp_compute_min_hblank(struct intel_crtc_state *crtc_state,
				const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	/* 32-bit symbols on 128b/132b (UHBR) links, 8-bit on 8b/10b */
	int symbol_size = intel_dp_is_uhbr(crtc_state) ? 32 : 8;
	/*
	 * min symbol cycles is 3(BS,VBID, BE) for 128b/132b and
	 * 5(BS, VBID, MVID, MAUD, BE) for 8b/10b
	 */
	int min_sym_cycles = intel_dp_is_uhbr(crtc_state) ? 3 : 5;
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);
	int min_hblank;
	int max_lane_count = 4;
	int hactive_sym_cycles, htotal_sym_cycles;
	int dsc_slices = 0;
	int link_bpp_x16;

	if (DISPLAY_VER(display) < 30)
		return 0;

	/* MIN_HBLANK should be set only for 8b/10b MST or for 128b/132b SST/MST */
	if (!is_mst && !intel_dp_is_uhbr(crtc_state))
		return 0;

	if (crtc_state->dsc.compression_enable) {
		dsc_slices = intel_dp_dsc_get_slice_count(connector,
							  adjusted_mode->crtc_clock,
							  adjusted_mode->crtc_hdisplay,
							  num_joined_pipes);
		if (!dsc_slices) {
			drm_dbg(display->drm, "failed to calculate dsc slice count\n");
			return -EINVAL;
		}
	}

	if (crtc_state->dsc.compression_enable)
		link_bpp_x16 = crtc_state->dsc.compressed_bpp_x16;
	else
		link_bpp_x16 = intel_dp_output_format_link_bpp_x16(crtc_state->output_format,
								   crtc_state->pipe_bpp);

	/* Calculate min Hblank Link Layer Symbol Cycle Count for 8b/10b MST & 128b/132b */
	hactive_sym_cycles = drm_dp_link_symbol_cycles(max_lane_count,
						       adjusted_mode->hdisplay,
						       dsc_slices,
						       link_bpp_x16,
						       symbol_size, is_mst);
	htotal_sym_cycles = adjusted_mode->htotal * hactive_sym_cycles /
			    adjusted_mode->hdisplay;

	min_hblank = htotal_sym_cycles - hactive_sym_cycles;
	/* minimum Hblank calculation: https://groups.vesa.org/wg/DP/document/20494 */
	min_hblank = max(min_hblank, min_sym_cycles);

	/*
	 * adjust the BlankingStart/BlankingEnd framing control from
	 * the calculated value
	 */
	min_hblank = min_hblank - 2;

	/*
	 * min_hblank formula is undergoing a change, to avoid underrun use the
	 * recomended value in spec to compare with the calculated one and use the
	 * minimum value
	 */
	if (intel_dp_is_uhbr(crtc_state)) {
		/*
		 * Note: Bspec requires a min_hblank of 2 for YCBCR420
		 * with compressed bpp 6, but the minimum compressed bpp
		 * supported by the driver is 8.
		 */
		drm_WARN_ON(display->drm,
			    (crtc_state->dsc.compression_enable &&
			     crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 &&
			     crtc_state->dsc.compressed_bpp_x16 < fxp_q4_from_int(8)));
		min_hblank = min(3, min_hblank);
	} else {
		min_hblank = min(10, min_hblank);
	}

	crtc_state->min_hblank = min_hblank;

	return 0;
}

/*
 * Top-level DP compute-config: validate the mode, pick the output format and
 * link configuration, then fill in panel fitting, color range, M/N values,
 * MSO splitting, audio, VRR/PSR/DRRS and the various SDPs.
 */
int
intel_dp_compute_config(struct intel_encoder *encoder,
			struct intel_crtc_state *pipe_config,
			struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	const struct drm_display_mode *fixed_mode;
	struct intel_connector *connector = intel_dp->attached_connector;
	int ret = 0, link_bpp_x16;

	fixed_mode = intel_panel_fixed_mode(connector, adjusted_mode);
	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
		ret = intel_panel_compute_config(connector, adjusted_mode);
		if (ret)
			return ret;
	}

	/* Mode flags this encoder can't handle */
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	if (!connector->base.interlace_allowed &&
	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return -EINVAL;

	if (intel_dp_hdisplay_bad(display, adjusted_mode->crtc_hdisplay))
		return -EINVAL;

	/*
	 * Try to respect downstream TMDS clock limits first, if
	 * that fails assume the user might know something we don't.
	 */
	ret = intel_dp_compute_output_format(encoder, pipe_config, conn_state, true);
	if (ret)
		ret = intel_dp_compute_output_format(encoder, pipe_config, conn_state, false);
	if (ret)
		return ret;

	if ((intel_dp_is_edp(intel_dp) && fixed_mode) ||
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		ret = intel_pfit_compute_config(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	if (intel_dp_is_uhbr(pipe_config)) {
		/* 128b/132b SST also needs this */
		pipe_config->mst_master_transcoder = pipe_config->cpu_transcoder;
	} else {
		pipe_config->enhanced_framing =
			drm_dp_enhanced_frame_cap(intel_dp->dpcd);
	}

	if (pipe_config->dsc.compression_enable)
		link_bpp_x16 = pipe_config->dsc.compressed_bpp_x16;
	else
		link_bpp_x16 = intel_dp_output_format_link_bpp_x16(pipe_config->output_format,
								   pipe_config->pipe_bpp);

	if (intel_dp->mso_link_count) {
		int n = intel_dp->mso_link_count;
		int overlap = intel_dp->mso_pixel_overlap;

		pipe_config->splitter.enable = true;
		pipe_config->splitter.link_count = n;
		pipe_config->splitter.pixel_overlap = overlap;

		drm_dbg_kms(display->drm,
			    "MSO link count %d, pixel overlap %d\n",
			    n, overlap);

		/* Scale the horizontal timings down to one MSO segment */
		adjusted_mode->crtc_hdisplay = adjusted_mode->crtc_hdisplay / n + overlap;
		adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hblank_start / n + overlap;
		adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_hblank_end / n + overlap;
		adjusted_mode->crtc_hsync_start = adjusted_mode->crtc_hsync_start / n + overlap;
		adjusted_mode->crtc_hsync_end = adjusted_mode->crtc_hsync_end / n + overlap;
		adjusted_mode->crtc_htotal = adjusted_mode->crtc_htotal / n + overlap;
		adjusted_mode->crtc_clock /= n;
	}

	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);

	if (!intel_dp_is_uhbr(pipe_config)) {
		intel_link_compute_m_n(link_bpp_x16,
				       pipe_config->lane_count,
				       adjusted_mode->crtc_clock,
				       pipe_config->port_clock,
				       intel_dp_bw_fec_overhead(pipe_config->fec_enable),
				       &pipe_config->dp_m_n);
	}

	ret = intel_dp_compute_min_hblank(pipe_config, conn_state);
	if (ret)
		return ret;

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m_n.data_m *= pipe_config->splitter.link_count;

	/*
	 * Ordering matters here: DRRS must be computed after PSR (see
	 * can_enable_drrs()), and the SDPs depend on the VRR/PSR results.
	 */
	intel_vrr_compute_config(pipe_config, conn_state);
	intel_dp_compute_as_sdp(intel_dp, pipe_config);
	intel_psr_compute_config(intel_dp, pipe_config, conn_state);
	intel_alpm_lobf_compute_config(intel_dp, pipe_config, conn_state);
	intel_dp_drrs_compute_config(connector, pipe_config, link_bpp_x16);
	intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
	intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);

	return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
							pipe_config);
}

/*
 * Record the link rate/lane count about to be used and reset the link
 * training state (train set, active flag, modeset-retry latch).
 */
void intel_dp_set_link_params(struct intel_dp *intel_dp,
			      int link_rate, int lane_count)
{
	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
	intel_dp->link.active = false;
	intel_dp->needs_modeset_retry = false;
	intel_dp->link_rate = link_rate;
	intel_dp->lane_count = lane_count;
}

/*
 * Reset the link parameters to the maximum common source/sink capabilities
 * and clear the MST probe results and training failure state.
 */
void intel_dp_reset_link_params(struct intel_dp *intel_dp)
{
	intel_dp->link.max_lane_count = intel_dp_max_common_lane_count(intel_dp);
	intel_dp->link.max_rate = intel_dp_max_common_rate(intel_dp);
	intel_dp->link.mst_probed_lane_count = 0;
	intel_dp->link.mst_probed_rate = 0;
	intel_dp->link.retrain_disabled = false;
	intel_dp->link.seq_train_failures = 0;
}

/* Enable backlight PWM and backlight PP control.
 */
void intel_edp_backlight_on(const struct intel_crtc_state *crtc_state,
			    const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));

	/* Backlight control only applies to eDP panels */
	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(display->drm, "\n");

	intel_backlight_enable(crtc_state, conn_state);
	intel_pps_backlight_on(intel_dp);
}

/* Disable backlight PP control and backlight PWM. */
void intel_edp_backlight_off(const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
	struct intel_display *display = to_intel_display(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(display->drm, "\n");

	intel_pps_backlight_off(intel_dp);
	intel_backlight_disable(old_conn_state);
}

/*
 * Return true if a downstream (branch) port needs the sink kept in D0 for
 * HPD signalling, i.e. we must not put a DPCD 1.1 branch device with HPD
 * capable downstream ports into D3.
 */
static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
{
	/*
	 * DPCD 1.2+ should support BRANCH_DEVICE_CTRL, and thus
	 * be capable of signalling downstream hpd with a long pulse.
	 * Whether or not that means D3 is safe to use is not clear,
	 * but let's assume so until proven otherwise.
	 *
	 * FIXME should really check all downstream ports...
	 */
	return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
		drm_dp_is_branch(intel_dp->dpcd) &&
		intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
}

/*
 * Read-modify-write a single flag in the DP_DSC_ENABLE DPCD register via the
 * given AUX channel. Returns a negative errno on AUX failure.
 */
static int
write_dsc_decompression_flag(struct drm_dp_aux *aux, u8 flag, bool set)
{
	int err;
	u8 val;

	err = drm_dp_dpcd_readb(aux, DP_DSC_ENABLE, &val);
	if (err < 0)
		return err;

	if (set)
		val |= flag;
	else
		val &= ~flag;

	return drm_dp_dpcd_writeb(aux, DP_DSC_ENABLE, val);
}

/* Toggle DSC decompression in the device behind the decompression AUX. */
static void
intel_dp_sink_set_dsc_decompression(struct intel_connector *connector,
				    bool enable)
{
	struct intel_display *display = to_intel_display(connector);

	if (write_dsc_decompression_flag(connector->dp.dsc_decompression_aux,
					 DP_DECOMPRESSION_EN, enable) < 0)
		drm_dbg_kms(display->drm,
			    "Failed to %s sink decompression state\n",
			    str_enable_disable(enable));
}

/*
 * Toggle DSC passthrough in the branch device ahead of the sink. Only
 * applicable on MST when the port has a passthrough AUX; no-op otherwise.
 */
static void
intel_dp_sink_set_dsc_passthrough(const struct intel_connector *connector,
				  bool enable)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_dp_aux *aux = connector->mst.port ?
				 connector->mst.port->passthrough_aux : NULL;

	if (!aux)
		return;

	if (write_dsc_decompression_flag(aux,
					 DP_DSC_PASSTHROUGH_EN, enable) < 0)
		drm_dbg_kms(display->drm,
			    "Failed to %s sink compression passthrough state\n",
			    str_enable_disable(enable));
}

/*
 * Count the connectors (including @connector) currently using the same
 * decompression AUX device as @connector. Used to reference count the shared
 * MST decompression AUX; on SST the AUX is never shared.
 */
static int intel_dp_dsc_aux_ref_count(struct intel_atomic_state *state,
				      const struct intel_connector *connector,
				      bool for_get_ref)
{
	struct intel_display *display = to_intel_display(state);
	struct drm_connector *_connector_iter;
	struct drm_connector_state *old_conn_state;
	struct drm_connector_state *new_conn_state;
	int ref_count = 0;
	int i;

	/*
	 * On SST the decompression AUX device won't be shared, each connector
	 * uses for this its own AUX targeting the sink device.
	 */
	if (!connector->mst.dp)
		return connector->dp.dsc_decompression_enabled ? 1 : 0;

	for_each_oldnew_connector_in_state(&state->base, _connector_iter,
					   old_conn_state, new_conn_state, i) {
		const struct intel_connector *
			connector_iter = to_intel_connector(_connector_iter);

		if (connector_iter->mst.dp != connector->mst.dp)
			continue;

		if (!connector_iter->dp.dsc_decompression_enabled)
			continue;

		drm_WARN_ON(display->drm,
			    (for_get_ref && !new_conn_state->crtc) ||
			    (!for_get_ref && !old_conn_state->crtc));

		if (connector_iter->dp.dsc_decompression_aux ==
		    connector->dp.dsc_decompression_aux)
			ref_count++;
	}

	return ref_count;
}

/*
 * Take a reference on the connector's decompression AUX. Returns true if
 * this was the first reference, i.e. the caller must program the DPCD.
 */
static bool intel_dp_dsc_aux_get_ref(struct intel_atomic_state *state,
				     struct intel_connector *connector)
{
	bool ret = intel_dp_dsc_aux_ref_count(state, connector, true) == 0;

	connector->dp.dsc_decompression_enabled = true;

	return ret;
}

static bool intel_dp_dsc_aux_put_ref(struct intel_atomic_state *state,
				     struct intel_connector
*connector)
{
	/*
	 * Drop a reference on the connector's decompression AUX. Returns true
	 * if this was the last reference, i.e. the caller must clear the DPCD.
	 */
	connector->dp.dsc_decompression_enabled = false;

	return intel_dp_dsc_aux_ref_count(state, connector, false) == 0;
}

/**
 * intel_dp_sink_enable_decompression - Enable DSC decompression in sink/last branch device
 * @state: atomic state
 * @connector: connector to enable the decompression for
 * @new_crtc_state: new state for the CRTC driving @connector
 *
 * Enable the DSC decompression if required in the %DP_DSC_ENABLE DPCD
 * register of the appropriate sink/branch device. On SST this is always the
 * sink device, whereas on MST based on each device's DSC capabilities it's
 * either the last branch device (enabling decompression in it) or both the
 * last branch device (enabling passthrough in it) and the sink device
 * (enabling decompression in it).
 */
void intel_dp_sink_enable_decompression(struct intel_atomic_state *state,
					struct intel_connector *connector,
					const struct intel_crtc_state *new_crtc_state)
{
	struct intel_display *display = to_intel_display(state);

	if (!new_crtc_state->dsc.compression_enable)
		return;

	if (drm_WARN_ON(display->drm,
			!connector->dp.dsc_decompression_aux ||
			connector->dp.dsc_decompression_enabled))
		return;

	/* Only the first user of a shared decompression AUX programs the DPCD */
	if (!intel_dp_dsc_aux_get_ref(state, connector))
		return;

	intel_dp_sink_set_dsc_passthrough(connector, true);
	intel_dp_sink_set_dsc_decompression(connector, true);
}

/**
 * intel_dp_sink_disable_decompression - Disable DSC decompression in sink/last branch device
 * @state: atomic state
 * @connector: connector to disable the decompression for
 * @old_crtc_state: old state for the CRTC driving @connector
 *
 * Disable the DSC decompression if required in the %DP_DSC_ENABLE DPCD
 * register of the appropriate sink/branch device, corresponding to the
 * sequence in intel_dp_sink_enable_decompression().
 */
void intel_dp_sink_disable_decompression(struct intel_atomic_state *state,
					 struct intel_connector *connector,
					 const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(state);

	if (!old_crtc_state->dsc.compression_enable)
		return;

	if (drm_WARN_ON(display->drm,
			!connector->dp.dsc_decompression_aux ||
			!connector->dp.dsc_decompression_enabled))
		return;

	/* Only the last user of a shared decompression AUX clears the DPCD */
	if (!intel_dp_dsc_aux_put_ref(state, connector))
		return;

	/* Reverse order wrt. intel_dp_sink_enable_decompression() */
	intel_dp_sink_set_dsc_decompression(connector, false);
	intel_dp_sink_set_dsc_passthrough(connector, false);
}

/*
 * Write the Intel source OUI to the sink, unless it's already valid or
 * already set to the expected value. On write failure oui_valid is cleared
 * so a later call retries.
 */
static void
intel_dp_init_source_oui(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	u8 oui[] = { 0x00, 0xaa, 0x01 };
	u8 buf[3] = {};

	if (READ_ONCE(intel_dp->oui_valid))
		return;

	WRITE_ONCE(intel_dp->oui_valid, true);

	/*
	 * During driver init, we want to be careful and avoid changing the source OUI if it's
	 * already set to what we want, so as to avoid clearing any state by accident
	 */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_SOURCE_OUI, buf, sizeof(buf)) < 0)
		drm_dbg_kms(display->drm, "Failed to read source OUI\n");

	if (memcmp(oui, buf, sizeof(oui)) == 0) {
		/* Assume the OUI was written now.
		 */
		intel_dp->last_oui_write = jiffies;
		return;
	}

	if (drm_dp_dpcd_write(&intel_dp->aux, DP_SOURCE_OUI, oui, sizeof(oui)) < 0) {
		drm_dbg_kms(display->drm, "Failed to write source OUI\n");
		WRITE_ONCE(intel_dp->oui_valid, false);
	}

	intel_dp->last_oui_write = jiffies;
}

/* Force the source OUI to be rewritten on the next intel_dp_init_source_oui() */
void intel_dp_invalidate_source_oui(struct intel_dp *intel_dp)
{
	WRITE_ONCE(intel_dp->oui_valid, false);
}

/*
 * Wait out the VBT-specified HDR DPCD refresh timeout, measured from the
 * last source OUI write, before touching OUI-gated DPCD registers.
 */
void intel_dp_wait_source_oui(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] Performing OUI wait (%u ms)\n",
		    connector->base.base.id, connector->base.name,
		    connector->panel.vbt.backlight.hdr_dpcd_refresh_timeout);

	wait_remaining_ms_from_jiffies(intel_dp->last_oui_write,
				       connector->panel.vbt.backlight.hdr_dpcd_refresh_timeout);
}

/* If the device supports it, try to set the power state appropriately */
void intel_dp_set_power(struct intel_dp *intel_dp, u8 mode)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DP_SET_POWER_D0) {
		/* Keep the sink in D0 if downstream HPD depends on it */
		if (downstream_hpd_needs_d0(intel_dp))
			return;

		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
	} else {
		struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

		intel_lspcon_resume(dig_port);

		/* Write the source OUI as early as possible */
		intel_dp_init_source_oui(intel_dp);

		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
			if (ret == 1)
				break;
			msleep(1);
		}

		if (ret == 1 && intel_lspcon_active(dig_port))
			intel_lspcon_wait_pcon_mode(dig_port);
	}

	if (ret != 1)
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Set power to %s failed\n",
			    encoder->base.base.id, encoder->base.name,
			    mode == DP_SET_POWER_D0 ? "D0" : "D3");
}

static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp);

/**
 * intel_dp_sync_state - sync the encoder state during init/resume
 * @encoder: intel encoder to sync
 * @crtc_state: state for the CRTC connected to the encoder
 *
 * Sync any state stored in the encoder wrt. HW state during driver init
 * and system resume.
 */
void intel_dp_sync_state(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	bool dpcd_updated = false;

	/*
	 * Don't clobber DPCD if it's been already read out during output
	 * setup (eDP) or detect.
	 */
	if (crtc_state && intel_dp->dpcd[DP_DPCD_REV] == 0) {
		intel_dp_get_dpcd(intel_dp);
		dpcd_updated = true;
	}

	intel_dp_tunnel_resume(intel_dp, crtc_state, dpcd_updated);

	if (crtc_state) {
		/* Adopt the already-enabled link from BIOS/previous state */
		intel_dp_reset_link_params(intel_dp);
		intel_dp_set_link_params(intel_dp, crtc_state->port_clock, crtc_state->lane_count);
		intel_dp->link.active = true;
	}
}

/*
 * Decide whether the BIOS-programmed state can be taken over without a full
 * modeset; returns false (forcing a full modeset) for unsupported link
 * rates, DSC, or panel replay capable sinks.
 */
bool intel_dp_initial_fastset_check(struct intel_encoder *encoder,
				    struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	bool fastset = true;

	/*
	 * If BIOS has set an unsupported or non-standard link rate for some
	 * reason force an encoder recompute and full modeset.
	 */
	if (intel_dp_rate_index(intel_dp->source_rates, intel_dp->num_source_rates,
				crtc_state->port_clock) < 0) {
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Forcing full modeset due to unsupported link rate\n",
			    encoder->base.base.id, encoder->base.name);
		crtc_state->uapi.connectors_changed = true;
		fastset = false;
	}

	/*
	 * FIXME hack to force full modeset when DSC is being used.
	 *
	 * As long as we do not have full state readout and config comparison
	 * of crtc_state->dsc, we have no way to ensure reliable fastset.
	 * Remove once we have readout for DSC.
	 */
	if (crtc_state->dsc.compression_enable) {
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Forcing full modeset due to DSC being enabled\n",
			    encoder->base.base.id, encoder->base.name);
		crtc_state->uapi.mode_changed = true;
		fastset = false;
	}

	if (CAN_PANEL_REPLAY(intel_dp)) {
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Forcing full modeset to compute panel replay state\n",
			    encoder->base.base.id, encoder->base.name);
		crtc_state->uapi.mode_changed = true;
		fastset = false;
	}

	return fastset;
}

/*
 * Refresh the cached PCON DSC encoder capability DPCD registers from a
 * branch device; the cache is zeroed first so a failed/skipped read leaves
 * no stale values.
 */
static void intel_dp_get_pcon_dsc_cap(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/* Clear the cached register set to avoid using stale values */

	memset(intel_dp->pcon_dsc_dpcd, 0, sizeof(intel_dp->pcon_dsc_dpcd));

	if (!drm_dp_is_branch(intel_dp->dpcd))
		return;

	if (drm_dp_dpcd_read(&intel_dp->aux, DP_PCON_DSC_ENCODER,
			     intel_dp->pcon_dsc_dpcd,
			     sizeof(intel_dp->pcon_dsc_dpcd)) < 0)
		drm_err(display->drm, "Failed to read DPCD register 0x%x\n",
			DP_PCON_DSC_ENCODER);

	drm_dbg_kms(display->drm, "PCON ENCODER DSC DPCD: %*ph\n",
		    (int)sizeof(intel_dp->pcon_dsc_dpcd), intel_dp->pcon_dsc_dpcd);
}

/*
 * Return the highest FRL bandwidth (in Gbps) set in the given bitmask, or 0
 * if no bit is set. Bit i corresponds to bw_gbps[i].
 */
static int intel_dp_pcon_get_frl_mask(u8 frl_bw_mask)
{
	static const int bw_gbps[] = {9, 18, 24, 32, 40, 48};
	int i;

	for (i = ARRAY_SIZE(bw_gbps) - 1; i >= 0; i--) {
		if (frl_bw_mask & (1 << i))
			return bw_gbps[i];
	}
	return 0;
}

/* Map an FRL bandwidth in Gbps to its DPCD bandwidth-mask bit (0 if unknown) */
static int intel_dp_pcon_set_frl_mask(int max_frl)
{
	switch (max_frl) {
	case 48:
		return DP_PCON_FRL_BW_MASK_48GBPS;
	case 40:
		return DP_PCON_FRL_BW_MASK_40GBPS;
	case 32:
		return DP_PCON_FRL_BW_MASK_32GBPS;
	case 24:
		return DP_PCON_FRL_BW_MASK_24GBPS;
	case 18:
		return DP_PCON_FRL_BW_MASK_18GBPS;
	case 9:
		return DP_PCON_FRL_BW_MASK_9GBPS;
	}

	return 0;
}

/*
 * Compute the HDMI sink's max FRL rate from its EDID-derived display info:
 * lanes * rate-per-lane, further limited by the DSC capabilities when the
 * sink supports HDMI 2.1 DSC 1.2.
 */
static int intel_dp_hdmi_sink_max_frl(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_display_info *info = &connector->base.display_info;
	int max_frl_rate;
	int max_lanes, rate_per_lane;
	int max_dsc_lanes, dsc_rate_per_lane;

	max_lanes = info->hdmi.max_lanes;
	rate_per_lane = info->hdmi.max_frl_rate_per_lane;
	max_frl_rate = max_lanes * rate_per_lane;

	if (info->hdmi.dsc_cap.v_1p2) {
		max_dsc_lanes = info->hdmi.dsc_cap.max_lanes;
		dsc_rate_per_lane = info->hdmi.dsc_cap.max_frl_rate_per_lane;
		if (max_dsc_lanes && dsc_rate_per_lane)
			max_frl_rate = min(max_frl_rate, max_dsc_lanes * dsc_rate_per_lane);
	}

	return max_frl_rate;
}

/*
 * Return true when the PCON's HDMI link is active in FRL mode at (at least)
 * the requested bandwidth; the trained mask is returned via @frl_trained_mask.
 */
static bool
intel_dp_pcon_is_frl_trained(struct intel_dp *intel_dp,
			     u8 max_frl_bw_mask, u8 *frl_trained_mask)
{
	if (drm_dp_pcon_hdmi_link_active(&intel_dp->aux) &&
	    drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, frl_trained_mask) == DP_PCON_HDMI_MODE_FRL &&
	    *frl_trained_mask >= max_frl_bw_mask)
		return true;

	return false;
}

/*
 * Run the PCON FRL training sequence: pick the max common PCON/sink FRL
 * bandwidth, then prepare, configure and enable FRL and poll until the HDMI
 * link is up. Records the trained rate in intel_dp->frl on success.
 */
static int intel_dp_pcon_start_frl_training(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
#define TIMEOUT_FRL_READY_MS 500
#define TIMEOUT_HDMI_LINK_ACTIVE_MS 1000
	int max_frl_bw, max_pcon_frl_bw, max_edid_frl_bw, ret;
	u8 max_frl_bw_mask = 0, frl_trained_mask;
	bool is_active;

	max_pcon_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
	drm_dbg(display->drm, "PCON max rate = %d Gbps\n", max_pcon_frl_bw);

	max_edid_frl_bw = intel_dp_hdmi_sink_max_frl(intel_dp);
	drm_dbg(display->drm, "Sink max rate from EDID = %d Gbps\n",
		max_edid_frl_bw);

	max_frl_bw = min(max_edid_frl_bw, max_pcon_frl_bw);

	if (max_frl_bw <= 0)
		return -EINVAL;

	max_frl_bw_mask = intel_dp_pcon_set_frl_mask(max_frl_bw);
	drm_dbg(display->drm, "MAX_FRL_BW_MASK = %u\n", max_frl_bw_mask);

	/* Nothing to do if the link is already trained at this bandwidth */
	if (intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask))
		goto frl_trained;

	ret = drm_dp_pcon_frl_prepare(&intel_dp->aux, false);
	if (ret < 0)
		return ret;
	/* Wait for PCON to be FRL Ready */
	ret = poll_timeout_us(is_active = drm_dp_pcon_is_frl_ready(&intel_dp->aux),
			      is_active,
			      1000, TIMEOUT_FRL_READY_MS * 1000, false);
	if (ret)
		return ret;

	ret = drm_dp_pcon_frl_configure_1(&intel_dp->aux, max_frl_bw,
					  DP_PCON_ENABLE_SEQUENTIAL_LINK);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_configure_2(&intel_dp->aux, max_frl_bw_mask,
					  DP_PCON_FRL_LINK_TRAIN_NORMAL);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_enable(&intel_dp->aux);
	if (ret < 0)
		return ret;
	/*
	 * Wait for FRL to be completed
	 * Check if the HDMI Link is up and active.
	 */
	ret = poll_timeout_us(is_active = intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask),
			      is_active,
			      1000, TIMEOUT_HDMI_LINK_ACTIVE_MS * 1000, false);
	if (ret)
		return ret;

frl_trained:
	drm_dbg(display->drm, "FRL_TRAINED_MASK = %u\n", frl_trained_mask);
	intel_dp->frl.trained_rate_gbps = intel_dp_pcon_get_frl_mask(frl_trained_mask);
	intel_dp->frl.is_trained = true;
	drm_dbg(display->drm, "FRL trained with : %d Gbps\n",
		intel_dp->frl.trained_rate_gbps);

	return 0;
}

/*
 * A sink counts as HDMI 2.1 when it sits behind a branch device, is an HDMI
 * sink, and advertises a non-zero FRL rate.
 */
static bool intel_dp_is_hdmi_2_1_sink(struct intel_dp *intel_dp)
{
	if (drm_dp_is_branch(intel_dp->dpcd) &&
	    intel_dp_has_hdmi_sink(intel_dp) &&
	    intel_dp_hdmi_sink_max_frl(intel_dp) > 0)
		return true;

	return false;
}

/*
 * Put the PCON into source-controlled TMDS mode: first enable source control
 * mode, then (in a second write) enable the HDMI link.
 */
static
int intel_dp_pcon_set_tmds_mode(struct intel_dp *intel_dp)
{
	int ret;
	u8 buf = 0;

	/* Set PCON source control mode */
	buf |= DP_PCON_ENABLE_SOURCE_CTL_MODE;

	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
	if (ret < 0)
		return ret;

	/* Set HDMI LINK ENABLE */
	buf |= DP_PCON_ENABLE_HDMI_LINK;
	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
	if (ret < 0)
		return ret;

	return 0;
}

void intel_dp_check_frl_training(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Always go for FRL training if:
	 * -PCON supports SRC_CTL_MODE (VESA DP2.0-HDMI2.1 PCON Spec Draft-1 Sec-7)
	 * -sink is HDMI2.1
	 */
	if (!(intel_dp->downstream_ports[2] & DP_PCON_SOURCE_CTL_MODE) ||
	    !intel_dp_is_hdmi_2_1_sink(intel_dp) ||
	    intel_dp->frl.is_trained)
		return;

	if (intel_dp_pcon_start_frl_training(intel_dp) < 0) {
		int ret, mode;

		drm_dbg(display->drm,
			"Couldn't set FRL mode, continuing
with TMDS mode\n"); 4093 ret = intel_dp_pcon_set_tmds_mode(intel_dp); 4094 mode = drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, NULL); 4095 4096 if (ret < 0 || mode != DP_PCON_HDMI_MODE_TMDS) 4097 drm_dbg(display->drm, 4098 "Issue with PCON, cannot set TMDS mode\n"); 4099 } else { 4100 drm_dbg(display->drm, "FRL training Completed\n"); 4101 } 4102 } 4103 4104 static int 4105 intel_dp_pcon_dsc_enc_slice_height(const struct intel_crtc_state *crtc_state) 4106 { 4107 int vactive = crtc_state->hw.adjusted_mode.vdisplay; 4108 4109 return intel_hdmi_dsc_get_slice_height(vactive); 4110 } 4111 4112 static int 4113 intel_dp_pcon_dsc_enc_slices(struct intel_dp *intel_dp, 4114 const struct intel_crtc_state *crtc_state) 4115 { 4116 struct intel_connector *connector = intel_dp->attached_connector; 4117 const struct drm_display_info *info = &connector->base.display_info; 4118 int hdmi_throughput = info->hdmi.dsc_cap.clk_per_slice; 4119 int hdmi_max_slices = info->hdmi.dsc_cap.max_slices; 4120 int pcon_max_slices = drm_dp_pcon_dsc_max_slices(intel_dp->pcon_dsc_dpcd); 4121 int pcon_max_slice_width = drm_dp_pcon_dsc_max_slice_width(intel_dp->pcon_dsc_dpcd); 4122 4123 return intel_hdmi_dsc_get_num_slices(crtc_state, pcon_max_slices, 4124 pcon_max_slice_width, 4125 hdmi_max_slices, hdmi_throughput); 4126 } 4127 4128 static int 4129 intel_dp_pcon_dsc_enc_bpp(struct intel_dp *intel_dp, 4130 const struct intel_crtc_state *crtc_state, 4131 int num_slices, int slice_width) 4132 { 4133 struct intel_connector *connector = intel_dp->attached_connector; 4134 const struct drm_display_info *info = &connector->base.display_info; 4135 int output_format = crtc_state->output_format; 4136 bool hdmi_all_bpp = info->hdmi.dsc_cap.all_bpp; 4137 int pcon_fractional_bpp = drm_dp_pcon_dsc_bpp_incr(intel_dp->pcon_dsc_dpcd); 4138 int hdmi_max_chunk_bytes = 4139 info->hdmi.dsc_cap.total_chunk_kbytes * 1024; 4140 4141 return intel_hdmi_dsc_get_bpp(pcon_fractional_bpp, slice_width, 4142 num_slices, output_format, 
hdmi_all_bpp, 4143 hdmi_max_chunk_bytes); 4144 } 4145 4146 void 4147 intel_dp_pcon_dsc_configure(struct intel_dp *intel_dp, 4148 const struct intel_crtc_state *crtc_state) 4149 { 4150 struct intel_display *display = to_intel_display(intel_dp); 4151 struct intel_connector *connector = intel_dp->attached_connector; 4152 const struct drm_display_info *info; 4153 u8 pps_param[6]; 4154 int slice_height; 4155 int slice_width; 4156 int num_slices; 4157 int bits_per_pixel; 4158 int ret; 4159 bool hdmi_is_dsc_1_2; 4160 4161 if (!intel_dp_is_hdmi_2_1_sink(intel_dp)) 4162 return; 4163 4164 if (!connector) 4165 return; 4166 4167 info = &connector->base.display_info; 4168 4169 hdmi_is_dsc_1_2 = info->hdmi.dsc_cap.v_1p2; 4170 4171 if (!drm_dp_pcon_enc_is_dsc_1_2(intel_dp->pcon_dsc_dpcd) || 4172 !hdmi_is_dsc_1_2) 4173 return; 4174 4175 slice_height = intel_dp_pcon_dsc_enc_slice_height(crtc_state); 4176 if (!slice_height) 4177 return; 4178 4179 num_slices = intel_dp_pcon_dsc_enc_slices(intel_dp, crtc_state); 4180 if (!num_slices) 4181 return; 4182 4183 slice_width = DIV_ROUND_UP(crtc_state->hw.adjusted_mode.hdisplay, 4184 num_slices); 4185 4186 bits_per_pixel = intel_dp_pcon_dsc_enc_bpp(intel_dp, crtc_state, 4187 num_slices, slice_width); 4188 if (!bits_per_pixel) 4189 return; 4190 4191 pps_param[0] = slice_height & 0xFF; 4192 pps_param[1] = slice_height >> 8; 4193 pps_param[2] = slice_width & 0xFF; 4194 pps_param[3] = slice_width >> 8; 4195 pps_param[4] = bits_per_pixel & 0xFF; 4196 pps_param[5] = (bits_per_pixel >> 8) & 0x3; 4197 4198 ret = drm_dp_pcon_pps_override_param(&intel_dp->aux, pps_param); 4199 if (ret < 0) 4200 drm_dbg_kms(display->drm, "Failed to set pcon DSC\n"); 4201 } 4202 4203 void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp, 4204 const struct intel_crtc_state *crtc_state) 4205 { 4206 struct intel_display *display = to_intel_display(intel_dp); 4207 bool ycbcr444_to_420 = false; 4208 bool rgb_to_ycbcr = false; 4209 u8 tmp; 4210 4211 if 
 (intel_dp->dpcd[DP_DPCD_REV] < 0x13)
		return;

	if (!drm_dp_is_branch(intel_dp->dpcd))
		return;

	tmp = intel_dp_has_hdmi_sink(intel_dp) ? DP_HDMI_DVI_OUTPUT_CONFIG : 0;

	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_PROTOCOL_CONVERTER_CONTROL_0, tmp) != 1)
		drm_dbg_kms(display->drm,
			    "Failed to %s protocol converter HDMI mode\n",
			    str_enable_disable(intel_dp_has_hdmi_sink(intel_dp)));

	/* Work out which conversions the PCON must do for the sink format. */
	if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		switch (crtc_state->output_format) {
		case INTEL_OUTPUT_FORMAT_YCBCR420:
			break;
		case INTEL_OUTPUT_FORMAT_YCBCR444:
			ycbcr444_to_420 = true;
			break;
		case INTEL_OUTPUT_FORMAT_RGB:
			rgb_to_ycbcr = true;
			ycbcr444_to_420 = true;
			break;
		default:
			MISSING_CASE(crtc_state->output_format);
			break;
		}
	} else if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR444) {
		switch (crtc_state->output_format) {
		case INTEL_OUTPUT_FORMAT_YCBCR444:
			break;
		case INTEL_OUTPUT_FORMAT_RGB:
			rgb_to_ycbcr = true;
			break;
		default:
			MISSING_CASE(crtc_state->output_format);
			break;
		}
	}

	tmp = ycbcr444_to_420 ? DP_CONVERSION_TO_YCBCR420_ENABLE : 0;

	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_PROTOCOL_CONVERTER_CONTROL_1, tmp) != 1)
		drm_dbg_kms(display->drm,
			    "Failed to %s protocol converter YCbCr 4:2:0 conversion mode\n",
			    str_enable_disable(intel_dp->dfp.ycbcr_444_to_420));

	tmp = rgb_to_ycbcr ? DP_CONVERSION_BT709_RGB_YCBCR_ENABLE : 0;

	if (drm_dp_pcon_convert_rgb_to_ycbcr(&intel_dp->aux, tmp) < 0)
		drm_dbg_kms(display->drm,
			    "Failed to %s protocol converter RGB->YCbCr conversion mode\n",
			    str_enable_disable(tmp));
}

/* Does the sink support the VSC SDP extension for colorimetry (DPRX caps)? */
static bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp)
{
	u8 dprx = 0;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST,
			      &dprx) != 1)
		return false;
	return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED;
}

/*
 * Read and log the sink's DSC receiver caps. On AUX failure dsc_dpcd is
 * left untouched (callers clear it beforehand).
 */
static void intel_dp_read_dsc_dpcd(struct drm_dp_aux *aux,
				   u8 dsc_dpcd[DP_DSC_RECEIVER_CAP_SIZE])
{
	if (drm_dp_dpcd_read(aux, DP_DSC_SUPPORT, dsc_dpcd,
			     DP_DSC_RECEIVER_CAP_SIZE) < 0) {
		drm_err(aux->drm_dev,
			"Failed to read DPCD register 0x%x\n",
			DP_DSC_SUPPORT);
		return;
	}

	drm_dbg_kms(aux->drm_dev, "DSC DPCD: %*ph\n",
		    DP_DSC_RECEIVER_CAP_SIZE,
		    dsc_dpcd);
}

/*
 * Initialize the branch device DSC overall throughput and line width
 * limits. Defaults to INT_MAX (no limit); tightened only when a branch
 * device reports explicit caps.
 */
static void init_dsc_overall_throughput_limits(struct intel_connector *connector, bool is_branch)
{
	u8 branch_caps[DP_DSC_BRANCH_CAP_SIZE];
	int line_width;

	connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444 = INT_MAX;
	connector->dp.dsc_branch_caps.overall_throughput.yuv422_420 = INT_MAX;
	connector->dp.dsc_branch_caps.max_line_width = INT_MAX;

	if (!is_branch)
		return;

	if (drm_dp_dpcd_read_data(connector->dp.dsc_decompression_aux,
				  DP_DSC_BRANCH_OVERALL_THROUGHPUT_0, branch_caps,
				  sizeof(branch_caps)) != 0)
		return;

	/* 0 from the helper means "not reported" -> keep the no-limit default. */
	connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444 =
		drm_dp_dsc_branch_max_overall_throughput(branch_caps, true) ? : INT_MAX;

	connector->dp.dsc_branch_caps.overall_throughput.yuv422_420 =
		drm_dp_dsc_branch_max_overall_throughput(branch_caps, false) ? : INT_MAX;

	line_width = drm_dp_dsc_branch_max_line_width(branch_caps);
	connector->dp.dsc_branch_caps.max_line_width = line_width > 0 ? line_width : INT_MAX;
}

/*
 * Read and cache the sink's DSC and FEC capabilities (DP 1.4+), including
 * branch-device throughput limits and a known throughput/bpp quirk.
 */
void intel_dp_get_dsc_sink_cap(u8 dpcd_rev,
			       const struct drm_dp_desc *desc, bool is_branch,
			       struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);

	/*
	 * Clear the cached register set to avoid using stale values
	 * for the sinks that do not support DSC.
	 */
	memset(connector->dp.dsc_dpcd, 0, sizeof(connector->dp.dsc_dpcd));

	/* Clear fec_capable to avoid using stale values */
	connector->dp.fec_capability = 0;

	memset(&connector->dp.dsc_branch_caps, 0, sizeof(connector->dp.dsc_branch_caps));
	connector->dp.dsc_throughput_quirk = false;

	if (dpcd_rev < DP_DPCD_REV_14)
		return;

	intel_dp_read_dsc_dpcd(connector->dp.dsc_decompression_aux,
			       connector->dp.dsc_dpcd);

	if (drm_dp_dpcd_readb(connector->dp.dsc_decompression_aux, DP_FEC_CAPABILITY,
			      &connector->dp.fec_capability) < 0) {
		drm_err(display->drm, "Failed to read FEC DPCD register\n");
		return;
	}

	drm_dbg_kms(display->drm, "FEC CAPABILITY: %x\n",
		    connector->dp.fec_capability);

	if (!(connector->dp.dsc_dpcd[0] & DP_DSC_DECOMPRESSION_IS_SUPPORTED))
		return;

	init_dsc_overall_throughput_limits(connector, is_branch);

	/*
	 * TODO: Move the HW rev check as well to the DRM core quirk table if
	 * that's required after clarifying the list of affected devices.
	 */
	if (drm_dp_has_quirk(desc, DP_DPCD_QUIRK_DSC_THROUGHPUT_BPP_LIMIT) &&
	    desc->ident.hw_rev == 0x10)
		connector->dp.dsc_throughput_quirk = true;
}

/* eDP 1.4+ counterpart of intel_dp_get_dsc_sink_cap() (never a branch). */
static void intel_edp_get_dsc_sink_cap(u8 edp_dpcd_rev, struct intel_connector *connector)
{
	if (edp_dpcd_rev < DP_EDP_14)
		return;

	intel_dp_read_dsc_dpcd(connector->dp.dsc_decompression_aux, connector->dp.dsc_dpcd);

	if (connector->dp.dsc_dpcd[0] & DP_DSC_DECOMPRESSION_IS_SUPPORTED)
		init_dsc_overall_throughput_limits(connector, false);
}

static void
intel_dp_detect_dsc_caps(struct intel_dp *intel_dp, struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/* Read DP Sink DSC Cap DPCD regs for DP v1.4 */
	if (!HAS_DSC(display))
		return;

	if (intel_dp_is_edp(intel_dp))
		intel_edp_get_dsc_sink_cap(intel_dp->edp_dpcd[0],
					   connector);
	else
		intel_dp_get_dsc_sink_cap(intel_dp->dpcd[DP_DPCD_REV],
					  &intel_dp->desc, drm_dp_is_branch(intel_dp->dpcd),
					  connector);
}

/*
 * For eDP MSO panels, expand the single-link mode to the full panel mode:
 * multiply the horizontal timings and clock by the link count, accounting
 * for the per-link pixel overlap.
 */
static void intel_edp_mso_mode_fixup(struct intel_connector *connector,
				     struct drm_display_mode *mode)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int n = intel_dp->mso_link_count;
	int overlap = intel_dp->mso_pixel_overlap;

	if (!mode || !n)
		return;

	mode->hdisplay = (mode->hdisplay - overlap) * n;
	mode->hsync_start = (mode->hsync_start - overlap) * n;
	mode->hsync_end = (mode->hsync_end - overlap) * n;
	mode->htotal = (mode->htotal - overlap) * n;
	mode->clock *= n;

	drm_mode_set_name(mode);

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] using generated MSO mode: " DRM_MODE_FMT "\n",
		    connector->base.base.id, connector->base.name,
		    DRM_MODE_ARG(mode));
}

void
intel_edp_fixup_vbt_bpp(struct intel_encoder *encoder, int pipe_bpp)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_connector *connector = intel_dp->attached_connector;

	if (connector->panel.vbt.edp.bpp && pipe_bpp > connector->panel.vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(display->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_bpp, connector->panel.vbt.edp.bpp);
		connector->panel.vbt.edp.bpp = pipe_bpp;
	}
}

/*
 * Read the eDP MSO (multi-SST operation) link capability (eDP 1.4+) and
 * cache a validated link count and pixel overlap.
 */
static void intel_edp_mso_init(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_display_info *info = &connector->base.display_info;
	u8 mso;

	if (intel_dp->edp_dpcd[0] < DP_EDP_14)
		return;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_EDP_MSO_LINK_CAPABILITIES, &mso) != 1) {
		drm_err(display->drm, "Failed to read MSO cap\n");
		return;
	}

	/* Valid configurations are SST or MSO 2x1, 2x2, 4x1 */
	mso &= DP_EDP_MSO_NUMBER_OF_LINKS_MASK;
	if (mso % 2 || mso > drm_dp_max_lane_count(intel_dp->dpcd)) {
		drm_err(display->drm, "Invalid MSO link count cap %u\n", mso);
		mso = 0;
	}

	if (mso) {
		drm_dbg_kms(display->drm,
			    "Sink MSO %ux%u configuration, pixel overlap %u\n",
			    mso, drm_dp_max_lane_count(intel_dp->dpcd) / mso,
			    info->mso_pixel_overlap);
		if (!HAS_MSO(display)) {
			drm_err(display->drm,
				"No source MSO support, disabling\n");
			mso = 0;
		}
	}

	intel_dp->mso_link_count = mso;
	intel_dp->mso_pixel_overlap = mso ? info->mso_pixel_overlap : 0;
}

/* Filter out sink rates that the VBT rejects for this encoder (in place). */
static void
intel_edp_set_data_override_rates(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int *sink_rates = intel_dp->sink_rates;
	int i, count = 0;

	for (i = 0; i < intel_dp->num_sink_rates; i++) {
		if (intel_bios_encoder_reject_edp_rate(encoder->devdata,
						       intel_dp->sink_rates[i]))
			continue;

		sink_rates[count++] = intel_dp->sink_rates[i];
	}
	intel_dp->num_sink_rates = count;
}

/*
 * Populate intel_dp->sink_rates for an eDP panel: from the eDP 1.4+
 * DP_SUPPORTED_LINK_RATES table when available, otherwise fall back to the
 * standard DPCD max-link-rate method.
 */
static void
intel_edp_set_sink_rates(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	intel_dp->num_sink_rates = 0;

	if (intel_dp->edp_dpcd[0] >= DP_EDP_14) {
		__le16 sink_rates[DP_MAX_SUPPORTED_RATES];
		int i;

		/*
		 * NOTE(review): the read result is unchecked; on AUX failure
		 * sink_rates may hold stack garbage that the loop below then
		 * parses - confirm whether that is intended.
		 */
		drm_dp_dpcd_read(&intel_dp->aux, DP_SUPPORTED_LINK_RATES,
				 sink_rates, sizeof(sink_rates));

		for (i = 0; i < ARRAY_SIZE(sink_rates); i++) {
			int rate;

			/* Value read multiplied by 200kHz gives the per-lane
			 * link rate in kHz. The source rates are, however,
			 * stored in terms of LS_Clk kHz. The full conversion
			 * back to symbols is
			 * (val * 200kHz)*(8/10 ch. encoding)*(1/8 bit to Byte)
			 */
			rate = le16_to_cpu(sink_rates[i]) * 200 / 10;

			if (rate == 0)
				break;

			/*
			 * Some platforms cannot reliably drive HBR3 rates due to PHY limitations,
			 * even if the sink advertises support. Reject any sink rates above HBR2 on
			 * the known machines for stable output.
			 */
			if (rate > 540000 &&
			    intel_has_quirk(display, QUIRK_EDP_LIMIT_RATE_HBR2))
				break;

			intel_dp->sink_rates[i] = rate;
		}
		intel_dp->num_sink_rates = i;
	}

	/*
	 * Use DP_LINK_RATE_SET if DP_SUPPORTED_LINK_RATES are available,
	 * default to DP_MAX_LINK_RATE and DP_LINK_BW_SET otherwise.
	 */
	if (intel_dp->num_sink_rates)
		intel_dp->use_rate_select = true;
	else
		intel_dp_set_sink_rates(intel_dp);

	intel_edp_set_data_override_rates(intel_dp);
}

/*
 * One-time DPCD initialization for an eDP panel: base caps, descriptor and
 * quirks, colorimetry, eDP display control registers, PSR, link rates,
 * lane count and DSC caps. Returns false if the base DPCD read fails.
 */
static bool
intel_edp_init_dpcd(struct intel_dp *intel_dp, struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/* this function is meant to be called only once */
	drm_WARN_ON(display->drm, intel_dp->dpcd[DP_DPCD_REV] != 0);

	if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0)
		return false;

	drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
			 drm_dp_is_branch(intel_dp->dpcd));
	intel_init_dpcd_quirks(intel_dp, &intel_dp->desc.ident);

	intel_dp->colorimetry_support =
		intel_dp_get_colorimetry_status(intel_dp);

	/*
	 * Read the eDP display control registers.
	 *
	 * Do this independent of DP_DPCD_DISPLAY_CONTROL_CAPABLE bit in
	 * DP_EDP_CONFIGURATION_CAP, because some buggy displays do not have it
	 * set, but require eDP 1.4+ detection (e.g. for supported link rates
	 * method). The display control registers should read zero if they're
	 * not supported anyway.
	 */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV,
			     intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) ==
	    sizeof(intel_dp->edp_dpcd)) {
		drm_dbg_kms(display->drm, "eDP DPCD: %*ph\n",
			    (int)sizeof(intel_dp->edp_dpcd),
			    intel_dp->edp_dpcd);

		intel_dp->use_max_params = intel_dp->edp_dpcd[0] < DP_EDP_14;
	}

	/*
	 * If needed, program our source OUI so we can make various Intel-specific AUX services
	 * available (such as HDR backlight controls)
	 */
	intel_dp_init_source_oui(intel_dp);

	/*
	 * This has to be called after intel_dp->edp_dpcd is filled, PSR checks
	 * for SET_POWER_CAPABLE bit in intel_dp->edp_dpcd[1]
	 */
	intel_psr_init_dpcd(intel_dp, connector);

	intel_edp_set_sink_rates(intel_dp);
	intel_dp_set_max_sink_lane_count(intel_dp);

	/* Read the eDP DSC DPCD registers */
	intel_dp_detect_dsc_caps(intel_dp, connector);

	return true;
}

/* Whether DP_SINK_COUNT can be relied upon for this sink, per the DRM helper. */
static bool
intel_dp_has_sink_count(struct intel_dp *intel_dp)
{
	if (!intel_dp->attached_connector)
		return false;

	return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base,
					  intel_dp->dpcd,
					  &intel_dp->desc);
}

/* Refresh cached sink rates/lane count and recompute the common rates. */
void intel_dp_update_sink_caps(struct intel_dp *intel_dp)
{
	intel_dp_set_sink_rates(intel_dp);
	intel_dp_set_max_sink_lane_count(intel_dp);
	intel_dp_set_common_rates(intel_dp);
}

/*
 * (Re-)read the sink's DPCD state on detect: LTTPR/DPRX caps, descriptor
 * and quirks (non-eDP only), sink count and downstream port info. Returns
 * false when no usable sink is present.
 */
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp)
{
	int ret;

	if (intel_dp_init_lttpr_and_dprx_caps(intel_dp) < 0)
		return false;

	/*
	 * Don't clobber cached eDP rates. Also skip re-reading
	 * the OUI/ID since we know it won't change.
	 */
	if (!intel_dp_is_edp(intel_dp)) {
		drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc,
				 drm_dp_is_branch(intel_dp->dpcd));

		intel_init_dpcd_quirks(intel_dp, &intel_dp->desc.ident);

		intel_dp->colorimetry_support =
			intel_dp_get_colorimetry_status(intel_dp);

		intel_dp_update_sink_caps(intel_dp);
	}

	if (intel_dp_has_sink_count(intel_dp)) {
		ret = drm_dp_read_sink_count(&intel_dp->aux);
		if (ret < 0)
			return false;

		/*
		 * Sink count can change between short pulse hpd hence
		 * a member variable in intel_dp will track any changes
		 * between short pulse interrupts.
		 */
		intel_dp->sink_count = ret;

		/*
		 * SINK_COUNT == 0 and DOWNSTREAM_PORT_PRESENT == 1 implies that
		 * a dongle is present but no display. Unless we require to know
		 * if a dongle is present or not, we don't need to update
		 * downstream port information. So, an early return here saves
		 * time from performing other operations which are not required.
		 */
		if (!intel_dp->sink_count)
			return false;
	}

	return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd,
					   intel_dp->downstream_ports) == 0;
}

/* Human-readable MST mode name for debug output. */
static const char *intel_dp_mst_mode_str(enum drm_dp_mst_mode mst_mode)
{
	if (mst_mode == DRM_DP_MST)
		return "MST";
	else if (mst_mode == DRM_DP_SST_SIDEBAND_MSG)
		return "SST w/ sideband messaging";
	else
		return "SST";
}

/*
 * Clamp the sink's reported MST mode by source support and the
 * enable_dp_mst module parameter.
 */
static enum drm_dp_mst_mode
intel_dp_mst_mode_choose(struct intel_dp *intel_dp,
			 enum drm_dp_mst_mode sink_mst_mode)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (!display->params.enable_dp_mst)
		return DRM_DP_SST;

	if (!intel_dp_mst_source_support(intel_dp))
		return DRM_DP_SST;

	/* SST w/ sideband messaging is only used with 128b/132b channel coding. */
	if (sink_mst_mode == DRM_DP_SST_SIDEBAND_MSG &&
	    !(intel_dp->dpcd[DP_MAIN_LINK_CHANNEL_CODING] & DP_CAP_ANSI_128B132B))
		return DRM_DP_SST;

	return sink_mst_mode;
}

/* Detect the MST mode to use for this sink and log the decision. */
static enum drm_dp_mst_mode
intel_dp_mst_detect(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	enum drm_dp_mst_mode sink_mst_mode;
	enum drm_dp_mst_mode mst_detect;

	sink_mst_mode = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd);

	mst_detect = intel_dp_mst_mode_choose(intel_dp, sink_mst_mode);

	drm_dbg_kms(display->drm,
		    "[ENCODER:%d:%s] MST support: port: %s, sink: %s, modparam: %s -> enable: %s\n",
		    encoder->base.base.id, encoder->base.name,
		    str_yes_no(intel_dp_mst_source_support(intel_dp)),
		    intel_dp_mst_mode_str(sink_mst_mode),
		    str_yes_no(display->params.enable_dp_mst),
		    intel_dp_mst_mode_str(mst_detect));

	return mst_detect;
}

/* Apply the detected MST mode to the topology manager. */
static void
intel_dp_mst_configure(struct intel_dp *intel_dp)
{
	if
 (!intel_dp_mst_source_support(intel_dp))
		return;

	intel_dp->is_mst = intel_dp->mst_detect != DRM_DP_SST;

	if (intel_dp->is_mst)
		intel_dp_mst_prepare_probe(intel_dp);

	drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, intel_dp->is_mst);

	/* Avoid stale info on the next detect cycle. */
	intel_dp->mst_detect = DRM_DP_SST;
}

/* Tear down the MST topology state when the device appears to be gone. */
static void
intel_dp_mst_disconnect(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (!intel_dp->is_mst)
		return;

	drm_dbg_kms(display->drm,
		    "MST device may have disappeared %d vs %d\n",
		    intel_dp->is_mst, intel_dp->mst.mgr.mst_state);
	intel_dp->is_mst = false;
	drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, intel_dp->is_mst);
}

/* Read the 4-byte sink IRQ ESI block (DP_SINK_COUNT_ESI onwards). */
static bool
intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *esi)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Display WA for HSD #13013007775: mtl/arl/lnl
	 * Read the sink count and link service IRQ registers in separate
	 * transactions to prevent disconnecting the sink on a TBT link
	 * inadvertently.
	 */
	if (IS_DISPLAY_VER(display, 14, 20) && !display->platform.battlemage) {
		if (drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI, esi, 3) != 3)
			return false;

		/* DP_SINK_COUNT_ESI + 3 == DP_LINK_SERVICE_IRQ_VECTOR_ESI0 */
		return drm_dp_dpcd_readb(&intel_dp->aux, DP_LINK_SERVICE_IRQ_VECTOR_ESI0,
					 &esi[3]) == 1;
	}

	return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI, esi, 4) == 4;
}

/* Ack sink ESI events (esi[1..3]); the DPCD write is retried up to 3 times. */
static bool intel_dp_ack_sink_irq_esi(struct intel_dp *intel_dp, u8 esi[4])
{
	int retry;

	for (retry = 0; retry < 3; retry++) {
		if (drm_dp_dpcd_write(&intel_dp->aux, DP_SINK_COUNT_ESI + 1,
				      &esi[1], 3) == 3)
			return true;
	}

	return false;
}

bool
intel_dp_needs_vsc_sdp(const struct intel_crtc_state *crtc_state,
		       const struct drm_connector_state *conn_state)
{
	/*
	 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
	 * of Color Encoding Format and Content Color Gamut], in order to
	 * sending YCBCR 420 or HDR BT.2020 signals we should use DP VSC SDP.
	 */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		return true;

	switch (conn_state->colorspace) {
	case DRM_MODE_COLORIMETRY_SYCC_601:
	case DRM_MODE_COLORIMETRY_OPYCC_601:
	case DRM_MODE_COLORIMETRY_BT2020_YCC:
	case DRM_MODE_COLORIMETRY_BT2020_RGB:
	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
		return true;
	default:
		break;
	}

	return false;
}

/* Pack an Adaptive-Sync SDP into a raw dp_sdp buffer; returns packed length. */
static ssize_t intel_dp_as_sdp_pack(const struct drm_dp_as_sdp *as_sdp,
				    struct dp_sdp *sdp, size_t size)
{
	size_t length = sizeof(struct dp_sdp);

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	/* Prepare AS (Adaptive Sync) SDP Header */
	sdp->sdp_header.HB0 = 0;
	sdp->sdp_header.HB1 = as_sdp->sdp_type;
	sdp->sdp_header.HB2 = 0x02;
	sdp->sdp_header.HB3 = as_sdp->length;

	/* Fill AS (Adaptive Sync) SDP Payload */
	sdp->db[0] = as_sdp->mode;
	sdp->db[1] = as_sdp->vtotal & 0xFF;
	sdp->db[2] = (as_sdp->vtotal >> 8) & 0xFF;
	sdp->db[3] = as_sdp->target_rr & 0xFF;
	sdp->db[4] = (as_sdp->target_rr >> 8) & 0x3;

	if (as_sdp->target_rr_divider)
		sdp->db[4] |= 0x20;

	return length;
}

/* Pack an HDR static metadata (DRM) infoframe into a DP SDP. */
static ssize_t
intel_dp_hdr_metadata_infoframe_sdp_pack(struct intel_display *display,
					 const struct hdmi_drm_infoframe *drm_infoframe,
					 struct dp_sdp *sdp,
					 size_t size)
{
	size_t length = sizeof(struct dp_sdp);
	const int infoframe_size = HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE;
	unsigned char buf[HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE];
	ssize_t len;

	if (size < length)
		return -ENOSPC;

	memset(sdp, 0, size);

	len = hdmi_drm_infoframe_pack_only(drm_infoframe, buf, sizeof(buf));
	if (len < 0) {
		drm_dbg_kms(display->drm,
			    "buffer size is smaller than hdr metadata infoframe\n");
		return -ENOSPC;
	}

	if (len != infoframe_size) {
		drm_dbg_kms(display->drm, "wrong static hdr metadata size\n");
		return -ENOSPC;
	}

	/*
	 * Set up the infoframe sdp packet for HDR static metadata.
	 * Prepare VSC Header for SU as per DP 1.4a spec,
	 * Table 2-100 and Table 2-101
	 */

	/* Secondary-Data Packet ID, 00h for non-Audio INFOFRAME */
	sdp->sdp_header.HB0 = 0;
	/*
	 * Packet Type 80h + Non-audio INFOFRAME Type value
	 * HDMI_INFOFRAME_TYPE_DRM: 0x87
	 * - 80h + Non-audio INFOFRAME Type value
	 * - InfoFrame Type: 0x07
	 * [CTA-861-G Table-42 Dynamic Range and Mastering InfoFrame]
	 */
	sdp->sdp_header.HB1 = drm_infoframe->type;
	/*
	 * Least Significant Eight Bits of (Data Byte Count – 1)
	 * infoframe_size - 1
	 */
	sdp->sdp_header.HB2 = 0x1D;
	/* INFOFRAME SDP Version Number */
	sdp->sdp_header.HB3 = (0x13 << 2);
	/* CTA Header Byte 2 (INFOFRAME Version Number) */
	sdp->db[0] = drm_infoframe->version;
	/* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */
	sdp->db[1] = drm_infoframe->length;
	/*
	 * Copy HDMI_DRM_INFOFRAME_SIZE size from a buffer after
	 * HDMI_INFOFRAME_HEADER_SIZE
	 */
	BUILD_BUG_ON(sizeof(sdp->db) < HDMI_DRM_INFOFRAME_SIZE + 2);
	memcpy(&sdp->db[2], &buf[HDMI_INFOFRAME_HEADER_SIZE],
	       HDMI_DRM_INFOFRAME_SIZE);

	/*
	 * Size of DP infoframe sdp packet for HDR static metadata consists of
	 * - DP SDP Header(struct dp_sdp_header): 4 bytes
	 * - Two Data Blocks: 2 bytes
	 *   CTA Header Byte2 (INFOFRAME Version Number)
	 *   CTA Header Byte3 (Length of INFOFRAME)
	 * - HDMI_DRM_INFOFRAME_SIZE: 26 bytes
	 *
	 * Prior to GEN11's GMP register size is identical to DP HDR static metadata
	 * infoframe size. But GEN11+ has larger than that size, write_infoframe
	 * will pad rest of the size.
	 */
	return sizeof(struct dp_sdp_header) + 2 + HDMI_DRM_INFOFRAME_SIZE;
}

/* Pack and write one SDP type, if enabled in crtc_state->infoframes.enable. */
static void intel_write_dp_sdp(struct intel_encoder *encoder,
			       const struct intel_crtc_state *crtc_state,
			       unsigned int type)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct dp_sdp sdp = {};
	ssize_t len;

	if ((crtc_state->infoframes.enable &
	     intel_hdmi_infoframe_enable(type)) == 0)
		return;

	switch (type) {
	case DP_SDP_VSC:
		len = drm_dp_vsc_sdp_pack(&crtc_state->infoframes.vsc, &sdp);
		break;
	case HDMI_PACKET_TYPE_GAMUT_METADATA:
		len = intel_dp_hdr_metadata_infoframe_sdp_pack(display,
							       &crtc_state->infoframes.drm.drm,
							       &sdp, sizeof(sdp));
		break;
	case DP_SDP_ADAPTIVE_SYNC:
		len = intel_dp_as_sdp_pack(&crtc_state->infoframes.as_sdp, &sdp,
					   sizeof(sdp));
		break;
	default:
		MISSING_CASE(type);
		return;
	}

	if (drm_WARN_ON(display->drm, len < 0))
		return;

	dig_port->write_infoframe(encoder, crtc_state, type, &sdp, len);
}

/*
 * Enable/disable the DIP (SDP) machinery for this transcoder and, when
 * enabling, (re-)write the VSC, Adaptive-Sync and HDR metadata SDPs.
 */
void intel_dp_set_infoframes(struct intel_encoder *encoder,
			     bool enable,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	i915_reg_t reg = HSW_TVIDEO_DIP_CTL(display, crtc_state->cpu_transcoder);
	u32 dip_enable = VIDEO_DIP_ENABLE_AVI_HSW | VIDEO_DIP_ENABLE_GCP_HSW |
			 VIDEO_DIP_ENABLE_VS_HSW | VIDEO_DIP_ENABLE_GMP_HSW |
			 VIDEO_DIP_ENABLE_SPD_HSW | VIDEO_DIP_ENABLE_DRM_GLK;

	if (HAS_AS_SDP(display))
		dip_enable |= VIDEO_DIP_ENABLE_AS_ADL;

	u32 val = intel_de_read(display, reg) & ~dip_enable;

	/* TODO: Sanitize DSC enabling wrt. intel_dsc_dp_pps_write(). */
	if (!enable && HAS_DSC(display))
		val &= ~VDIP_ENABLE_PPS;

	/*
	 * This routine disables VSC DIP if the function is called
	 * to disable SDP or if it does not have PSR
	 */
	if (!enable || !crtc_state->has_psr)
		val &= ~VIDEO_DIP_ENABLE_VSC_HSW;

	intel_de_write(display, reg, val);
	intel_de_posting_read(display, reg);

	if (!enable)
		return;

	intel_write_dp_sdp(encoder, crtc_state, DP_SDP_VSC);
	intel_write_dp_sdp(encoder, crtc_state, DP_SDP_ADAPTIVE_SYNC);

	intel_write_dp_sdp(encoder, crtc_state, HDMI_PACKET_TYPE_GAMUT_METADATA);
}

/* Validate and unpack an Adaptive-Sync SDP from a raw dp_sdp buffer. */
static
int intel_dp_as_sdp_unpack(struct drm_dp_as_sdp *as_sdp,
			   const void *buffer, size_t size)
{
	const struct dp_sdp *sdp = buffer;

	if (size < sizeof(struct dp_sdp))
		return -EINVAL;

	memset(as_sdp, 0, sizeof(*as_sdp));

	if (sdp->sdp_header.HB0 != 0)
		return -EINVAL;

	if (sdp->sdp_header.HB1 != DP_SDP_ADAPTIVE_SYNC)
		return -EINVAL;

	if (sdp->sdp_header.HB2 != 0x02)
		return -EINVAL;

	if ((sdp->sdp_header.HB3 & 0x3F) != 9)
		return -EINVAL;

	as_sdp->length = sdp->sdp_header.HB3 & DP_ADAPTIVE_SYNC_SDP_LENGTH;
	as_sdp->mode = sdp->db[0] & DP_ADAPTIVE_SYNC_SDP_OPERATION_MODE;
	as_sdp->vtotal = (sdp->db[2] << 8) | sdp->db[1];
	as_sdp->target_rr = (u64)sdp->db[3] | ((u64)sdp->db[4] & 0x3);
	as_sdp->target_rr_divider = sdp->db[4] & 0x20 ?
true : false; 5043 5044 return 0; 5045 } 5046 5047 static int intel_dp_vsc_sdp_unpack(struct drm_dp_vsc_sdp *vsc, 5048 const void *buffer, size_t size) 5049 { 5050 const struct dp_sdp *sdp = buffer; 5051 5052 if (size < sizeof(struct dp_sdp)) 5053 return -EINVAL; 5054 5055 memset(vsc, 0, sizeof(*vsc)); 5056 5057 if (sdp->sdp_header.HB0 != 0) 5058 return -EINVAL; 5059 5060 if (sdp->sdp_header.HB1 != DP_SDP_VSC) 5061 return -EINVAL; 5062 5063 vsc->sdp_type = sdp->sdp_header.HB1; 5064 vsc->revision = sdp->sdp_header.HB2; 5065 vsc->length = sdp->sdp_header.HB3; 5066 5067 if ((sdp->sdp_header.HB2 == 0x2 && sdp->sdp_header.HB3 == 0x8) || 5068 (sdp->sdp_header.HB2 == 0x4 && sdp->sdp_header.HB3 == 0xe) || 5069 (sdp->sdp_header.HB2 == 0x6 && sdp->sdp_header.HB3 == 0x10)) { 5070 /* 5071 * - HB2 = 0x2, HB3 = 0x8 5072 * VSC SDP supporting 3D stereo + PSR 5073 * - HB2 = 0x4, HB3 = 0xe 5074 * VSC SDP supporting 3D stereo + PSR2 with Y-coordinate of 5075 * first scan line of the SU region (applies to eDP v1.4b 5076 * and higher). 5077 * - HB2 = 0x6, HB3 = 0x10 5078 * VSC SDP supporting 3D stereo + Panel Replay. 5079 */ 5080 return 0; 5081 } else if (sdp->sdp_header.HB2 == 0x5 && sdp->sdp_header.HB3 == 0x13) { 5082 /* 5083 * - HB2 = 0x5, HB3 = 0x13 5084 * VSC SDP supporting 3D stereo + PSR2 + Pixel Encoding/Colorimetry 5085 * Format. 
5086 */ 5087 vsc->pixelformat = (sdp->db[16] >> 4) & 0xf; 5088 vsc->colorimetry = sdp->db[16] & 0xf; 5089 vsc->dynamic_range = (sdp->db[17] >> 7) & 0x1; 5090 5091 switch (sdp->db[17] & 0x7) { 5092 case 0x0: 5093 vsc->bpc = 6; 5094 break; 5095 case 0x1: 5096 vsc->bpc = 8; 5097 break; 5098 case 0x2: 5099 vsc->bpc = 10; 5100 break; 5101 case 0x3: 5102 vsc->bpc = 12; 5103 break; 5104 case 0x4: 5105 vsc->bpc = 16; 5106 break; 5107 default: 5108 MISSING_CASE(sdp->db[17] & 0x7); 5109 return -EINVAL; 5110 } 5111 5112 vsc->content_type = sdp->db[18] & 0x7; 5113 } else { 5114 return -EINVAL; 5115 } 5116 5117 return 0; 5118 } 5119 5120 static void 5121 intel_read_dp_as_sdp(struct intel_encoder *encoder, 5122 struct intel_crtc_state *crtc_state, 5123 struct drm_dp_as_sdp *as_sdp) 5124 { 5125 struct intel_display *display = to_intel_display(encoder); 5126 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5127 unsigned int type = DP_SDP_ADAPTIVE_SYNC; 5128 struct dp_sdp sdp = {}; 5129 int ret; 5130 5131 if ((crtc_state->infoframes.enable & 5132 intel_hdmi_infoframe_enable(type)) == 0) 5133 return; 5134 5135 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, 5136 sizeof(sdp)); 5137 5138 ret = intel_dp_as_sdp_unpack(as_sdp, &sdp, sizeof(sdp)); 5139 if (ret) 5140 drm_dbg_kms(display->drm, "Failed to unpack DP AS SDP\n"); 5141 } 5142 5143 static int 5144 intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe *drm_infoframe, 5145 const void *buffer, size_t size) 5146 { 5147 int ret; 5148 5149 const struct dp_sdp *sdp = buffer; 5150 5151 if (size < sizeof(struct dp_sdp)) 5152 return -EINVAL; 5153 5154 if (sdp->sdp_header.HB0 != 0) 5155 return -EINVAL; 5156 5157 if (sdp->sdp_header.HB1 != HDMI_INFOFRAME_TYPE_DRM) 5158 return -EINVAL; 5159 5160 /* 5161 * Least Significant Eight Bits of (Data Byte Count – 1) 5162 * 1Dh (i.e., Data Byte Count = 30 bytes). 
5163 */ 5164 if (sdp->sdp_header.HB2 != 0x1D) 5165 return -EINVAL; 5166 5167 /* Most Significant Two Bits of (Data Byte Count – 1), Clear to 00b. */ 5168 if ((sdp->sdp_header.HB3 & 0x3) != 0) 5169 return -EINVAL; 5170 5171 /* INFOFRAME SDP Version Number */ 5172 if (((sdp->sdp_header.HB3 >> 2) & 0x3f) != 0x13) 5173 return -EINVAL; 5174 5175 /* CTA Header Byte 2 (INFOFRAME Version Number) */ 5176 if (sdp->db[0] != 1) 5177 return -EINVAL; 5178 5179 /* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */ 5180 if (sdp->db[1] != HDMI_DRM_INFOFRAME_SIZE) 5181 return -EINVAL; 5182 5183 ret = hdmi_drm_infoframe_unpack_only(drm_infoframe, &sdp->db[2], 5184 HDMI_DRM_INFOFRAME_SIZE); 5185 5186 return ret; 5187 } 5188 5189 static void intel_read_dp_vsc_sdp(struct intel_encoder *encoder, 5190 struct intel_crtc_state *crtc_state, 5191 struct drm_dp_vsc_sdp *vsc) 5192 { 5193 struct intel_display *display = to_intel_display(encoder); 5194 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5195 unsigned int type = DP_SDP_VSC; 5196 struct dp_sdp sdp = {}; 5197 int ret; 5198 5199 if ((crtc_state->infoframes.enable & 5200 intel_hdmi_infoframe_enable(type)) == 0) 5201 return; 5202 5203 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, sizeof(sdp)); 5204 5205 ret = intel_dp_vsc_sdp_unpack(vsc, &sdp, sizeof(sdp)); 5206 5207 if (ret) 5208 drm_dbg_kms(display->drm, "Failed to unpack DP VSC SDP\n"); 5209 } 5210 5211 static void intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder *encoder, 5212 struct intel_crtc_state *crtc_state, 5213 struct hdmi_drm_infoframe *drm_infoframe) 5214 { 5215 struct intel_display *display = to_intel_display(encoder); 5216 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5217 unsigned int type = HDMI_PACKET_TYPE_GAMUT_METADATA; 5218 struct dp_sdp sdp = {}; 5219 int ret; 5220 5221 if ((crtc_state->infoframes.enable & 5222 intel_hdmi_infoframe_enable(type)) == 0) 5223 return; 5224 5225 
dig_port->read_infoframe(encoder, crtc_state, type, &sdp, 5226 sizeof(sdp)); 5227 5228 ret = intel_dp_hdr_metadata_infoframe_sdp_unpack(drm_infoframe, &sdp, 5229 sizeof(sdp)); 5230 5231 if (ret) 5232 drm_dbg_kms(display->drm, 5233 "Failed to unpack DP HDR Metadata Infoframe SDP\n"); 5234 } 5235 5236 void intel_read_dp_sdp(struct intel_encoder *encoder, 5237 struct intel_crtc_state *crtc_state, 5238 unsigned int type) 5239 { 5240 switch (type) { 5241 case DP_SDP_VSC: 5242 intel_read_dp_vsc_sdp(encoder, crtc_state, 5243 &crtc_state->infoframes.vsc); 5244 break; 5245 case HDMI_PACKET_TYPE_GAMUT_METADATA: 5246 intel_read_dp_hdr_metadata_infoframe_sdp(encoder, crtc_state, 5247 &crtc_state->infoframes.drm.drm); 5248 break; 5249 case DP_SDP_ADAPTIVE_SYNC: 5250 intel_read_dp_as_sdp(encoder, crtc_state, 5251 &crtc_state->infoframes.as_sdp); 5252 break; 5253 default: 5254 MISSING_CASE(type); 5255 break; 5256 } 5257 } 5258 5259 static bool intel_dp_link_ok(struct intel_dp *intel_dp, 5260 u8 link_status[DP_LINK_STATUS_SIZE]) 5261 { 5262 struct intel_display *display = to_intel_display(intel_dp); 5263 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 5264 bool uhbr = intel_dp->link_rate >= 1000000; 5265 bool ok; 5266 5267 if (uhbr) 5268 ok = drm_dp_128b132b_lane_channel_eq_done(link_status, 5269 intel_dp->lane_count); 5270 else 5271 ok = drm_dp_channel_eq_ok(link_status, intel_dp->lane_count); 5272 5273 if (ok) 5274 return true; 5275 5276 intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status); 5277 drm_dbg_kms(display->drm, 5278 "[ENCODER:%d:%s] %s link not ok, retraining\n", 5279 encoder->base.base.id, encoder->base.name, 5280 uhbr ? 
"128b/132b" : "8b/10b"); 5281 5282 return false; 5283 } 5284 5285 static void 5286 intel_dp_mst_hpd_irq(struct intel_dp *intel_dp, u8 *esi, u8 *ack) 5287 { 5288 bool handled = false; 5289 5290 drm_dp_mst_hpd_irq_handle_event(&intel_dp->mst.mgr, esi, ack, &handled); 5291 5292 if (esi[1] & DP_CP_IRQ) { 5293 intel_hdcp_handle_cp_irq(intel_dp->attached_connector); 5294 ack[1] |= DP_CP_IRQ; 5295 } 5296 } 5297 5298 static bool intel_dp_mst_link_status(struct intel_dp *intel_dp) 5299 { 5300 struct intel_display *display = to_intel_display(intel_dp); 5301 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 5302 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 5303 const size_t esi_link_status_size = DP_LINK_STATUS_SIZE - 2; 5304 5305 if (drm_dp_dpcd_read(&intel_dp->aux, DP_LANE0_1_STATUS_ESI, link_status, 5306 esi_link_status_size) != esi_link_status_size) { 5307 drm_err(display->drm, 5308 "[ENCODER:%d:%s] Failed to read link status\n", 5309 encoder->base.base.id, encoder->base.name); 5310 return false; 5311 } 5312 5313 return intel_dp_link_ok(intel_dp, link_status); 5314 } 5315 5316 /** 5317 * intel_dp_check_mst_status - service any pending MST interrupts, check link status 5318 * @intel_dp: Intel DP struct 5319 * 5320 * Read any pending MST interrupts, call MST core to handle these and ack the 5321 * interrupts. Check if the main and AUX link state is ok. 5322 * 5323 * Returns: 5324 * - %true if pending interrupts were serviced (or no interrupts were 5325 * pending) w/o detecting an error condition. 5326 * - %false if an error condition - like AUX failure or a loss of link - is 5327 * detected, or another condition - like a DP tunnel BW state change - needs 5328 * servicing from the hotplug work. 
 */
static bool
intel_dp_check_mst_status(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	bool link_ok = true;
	bool reprobe_needed = false;

	/* Loop until all pending ESI events have been serviced and acked. */
	for (;;) {
		u8 esi[4] = {};
		u8 ack[4] = {};

		if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) {
			drm_dbg_kms(display->drm,
				    "failed to get ESI - device may have failed\n");
			link_ok = false;

			break;
		}

		drm_dbg_kms(display->drm, "DPRX ESI: %4ph\n", esi);

		/* Only check the link once per invocation (while link_ok). */
		if (intel_dp_mst_active_streams(intel_dp) > 0 && link_ok &&
		    esi[3] & LINK_STATUS_CHANGED) {
			if (!intel_dp_mst_link_status(intel_dp))
				link_ok = false;
			ack[3] |= LINK_STATUS_CHANGED;
		}

		intel_dp_mst_hpd_irq(intel_dp, esi, ack);

		if (esi[3] & DP_TUNNELING_IRQ) {
			if (drm_dp_tunnel_handle_irq(display->dp_tunnel_mgr,
						     &intel_dp->aux))
				reprobe_needed = true;
			ack[3] |= DP_TUNNELING_IRQ;
		}

		/* Nothing was handled this iteration -> no more events. */
		if (mem_is_zero(ack, sizeof(ack)))
			break;

		if (!intel_dp_ack_sink_irq_esi(intel_dp, ack))
			drm_dbg_kms(display->drm, "Failed to ack ESI\n");

		if (ack[1] & (DP_DOWN_REP_MSG_RDY | DP_UP_REQ_MSG_RDY))
			drm_dp_mst_hpd_irq_send_new_request(&intel_dp->mst.mgr);
	}

	if (!link_ok || intel_dp->link.force_retrain)
		intel_encoder_link_check_queue_work(encoder, 0);

	return !reprobe_needed;
}

/*
 * If the PCON reports the HDMI (FRL) link as down while we believed it
 * trained, disable the HDMI link in the PCON, log the FRL error counts and
 * kick off FRL retraining (which may fall back to TMDS).
 */
static void
intel_dp_handle_hdmi_link_status_change(struct intel_dp *intel_dp)
{
	bool is_active;
	u8 buf = 0;

	is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux);
	if (intel_dp->frl.is_trained && !is_active) {
		if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, &buf) < 0)
			return;

		buf &= ~DP_PCON_ENABLE_HDMI_LINK;
		if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf) < 0)
			return;

		drm_dp_pcon_hdmi_frl_link_error_count(&intel_dp->aux, &intel_dp->attached_connector->base);

		intel_dp->frl.is_trained = false;

		/* Restart FRL training or fall back to TMDS mode */
		intel_dp_check_frl_training(intel_dp);
	}
}

/*
 * Decide whether the active link needs retraining, based on forced retrain
 * requests, sequential training failures and the DPRX link status. Returns
 * false when retraining is impossible or pointless (link inactive, PSR
 * active, stale link params, retraining disabled).
 */
static bool
intel_dp_needs_link_retrain(struct intel_dp *intel_dp)
{
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (!intel_dp->link.active)
		return false;

	/*
	 * While PSR source HW is enabled, it will control main-link sending
	 * frames, enabling and disabling it so trying to do a retrain will fail
	 * as the link would or not be on or it could mix training patterns
	 * and frame data at the same time causing retrain to fail.
	 * Also when exiting PSR, HW will retrain the link anyways fixing
	 * any link status error.
	 */
	if (intel_psr_enabled(intel_dp))
		return false;

	if (intel_dp->link.force_retrain)
		return true;

	if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX,
					     link_status) < 0)
		return false;

	/*
	 * Validate the cached values of intel_dp->link_rate and
	 * intel_dp->lane_count before attempting to retrain.
	 *
	 * FIXME would be nice to user the crtc state here, but since
	 * we need to call this from the short HPD handler that seems
	 * a bit hard.
	 */
	if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate,
					intel_dp->lane_count))
		return false;

	if (intel_dp->link.retrain_disabled)
		return false;

	if (intel_dp->link.seq_train_failures)
		return true;

	/* Retrain if link not ok */
	return !intel_dp_link_ok(intel_dp, link_status) &&
		!intel_psr_link_ok(intel_dp);
}

/*
 * Does @conn_state drive this DP port, either via its SST encoder or one of
 * its per-pipe MST stream encoders?
 */
bool intel_dp_has_connector(struct intel_dp *intel_dp,
			    const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder;
	enum pipe pipe;

	if (!conn_state->best_encoder)
		return false;

	/* SST */
	encoder = &dp_to_dig_port(intel_dp)->base;
	if (conn_state->best_encoder == &encoder->base)
		return true;

	/* MST */
	for_each_pipe(display, pipe) {
		encoder = &intel_dp->mst.stream_encoders[pipe]->base;
		if (conn_state->best_encoder == &encoder->base)
			return true;
	}

	return false;
}

/*
 * Wait (up to 5s) for the pending commit on @conn_state to finish its
 * hardware update. Requires the connection_mutex to be held.
 */
static void wait_for_connector_hw_done(const struct drm_connector_state *conn_state)
{
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct intel_display *display = to_intel_display(connector);

	drm_modeset_lock_assert_held(&display->drm->mode_config.connection_mutex);

	if (!conn_state->commit)
		return;

	drm_WARN_ON(display->drm,
		    !wait_for_completion_timeout(&conn_state->commit->hw_done,
						 msecs_to_jiffies(5000)));
}

/*
 * Collect the mask of pipes actively driven by this DP port, taking each
 * CRTC lock and waiting for pending commits. Returns 0 or a locking error
 * (e.g. -EDEADLK for ctx backoff).
 */
int intel_dp_get_active_pipes(struct intel_dp *intel_dp,
			      struct drm_modeset_acquire_ctx *ctx,
			      u8 *pipe_mask)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	*pipe_mask = 0;

	drm_connector_list_iter_begin(display->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state =
			connector->base.state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!intel_dp_has_connector(intel_dp, conn_state))
			continue;

		crtc = to_intel_crtc(conn_state->crtc);
		if (!crtc)
			continue;

		ret = drm_modeset_lock(&crtc->base.mutex, ctx);
		if (ret)
			break;

		crtc_state = to_intel_crtc_state(crtc->base.state);

		drm_WARN_ON(display->drm,
			    !intel_crtc_has_dp_encoder(crtc_state));

		if (!crtc_state->hw.active)
			continue;

		wait_for_connector_hw_done(conn_state);

		*pipe_mask |= BIT(crtc->pipe);
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}

/* Flush any commit pending on @connector's current state. */
void intel_dp_flush_connector_commits(struct intel_connector *connector)
{
	wait_for_connector_hw_done(connector->base.state);
}

/* Connected for our purposes: probed connected, or operating in MST mode. */
static bool intel_dp_is_connected(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	return connector->base.status == connector_status_connected ||
		intel_dp->is_mst;
}

/*
 * Retrain the link by recommitting the active pipes, if a retrain is
 * (still) needed. Rechecks the need after taking the CRTC locks since the
 * state may have changed meanwhile. Returns 0 or a commit/locking error.
 */
static int intel_dp_retrain_link(struct intel_encoder *encoder,
				 struct drm_modeset_acquire_ctx *ctx)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u8 pipe_mask;
	int ret;

	if (!intel_dp_is_connected(intel_dp))
		return 0;

	ret = drm_modeset_lock(&display->drm->mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	if (!intel_dp_needs_link_retrain(intel_dp))
		return 0;

	ret = intel_dp_get_active_pipes(intel_dp, ctx, &pipe_mask);
	if (ret)
		return ret;

	if (pipe_mask == 0)
		return 0;

	if (!intel_dp_needs_link_retrain(intel_dp))
		return 0;

	drm_dbg_kms(display->drm,
		    "[ENCODER:%d:%s] retraining link (forced %s)\n",
		    encoder->base.base.id, encoder->base.name,
		    str_yes_no(intel_dp->link.force_retrain));

	ret = intel_modeset_commit_pipes(display, pipe_mask, ctx);
	if (ret == -EDEADLK)
		return ret;

	/* The forced retrain was attempted either way; clear the request. */
	intel_dp->link.force_retrain = false;

	if (ret)
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] link retraining failed: %pe\n",
			    encoder->base.base.id, encoder->base.name,
			    ERR_PTR(ret));

	return ret;
}

/* Link-check work entry point: retrain with modeset-lock retry handling. */
void intel_dp_link_check(struct intel_encoder *encoder)
{
	struct drm_modeset_acquire_ctx ctx;
	int ret;

	intel_modeset_lock_ctx_retry(&ctx, NULL, 0, ret)
		ret = intel_dp_retrain_link(encoder, &ctx);
}

/* Queue the link-check work if the link currently needs retraining. */
void intel_dp_check_link_state(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;

	if (!intel_dp_is_connected(intel_dp))
		return;

	if (!intel_dp_needs_link_retrain(intel_dp))
		return;

	intel_encoder_link_check_queue_work(encoder, 0);
}

/*
 * Service the DP 1.1+ DEVICE_SERVICE_IRQ_VECTOR: ack it and dispatch
 * automated-test and content-protection IRQs.
 */
static void intel_dp_check_device_service_irq(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	u8 val;

	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_DEVICE_SERVICE_IRQ_VECTOR, &val) != 1 || !val)
		return;

	drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val);

	if (val & DP_AUTOMATED_TEST_REQUEST)
		intel_dp_test_request(intel_dp);

	if (val & DP_CP_IRQ)
		intel_hdcp_handle_cp_irq(intel_dp->attached_connector);

	if (val & DP_SINK_SPECIFIC_IRQ)
		drm_dbg_kms(display->drm, "Sink specific irq unhandled\n");
}

/*
 * Service LINK_SERVICE_IRQ_VECTOR_ESI0: handle DP tunneling and PCON HDMI
 * link-status-change IRQs. Returns true when a full reprobe is needed.
 */
static bool intel_dp_check_link_service_irq(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	bool reprobe_needed = false;
	u8 val;

	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return false;

	if (drm_dp_dpcd_readb(&intel_dp->aux,
			      DP_LINK_SERVICE_IRQ_VECTOR_ESI0, &val) != 1 || !val)
		return false;

	if ((val & DP_TUNNELING_IRQ) &&
	    drm_dp_tunnel_handle_irq(display->dp_tunnel_mgr,
				     &intel_dp->aux))
		reprobe_needed = true;

	/* Ack the serviced IRQ bits; bail if the ack write fails. */
	if (drm_dp_dpcd_writeb(&intel_dp->aux,
			       DP_LINK_SERVICE_IRQ_VECTOR_ESI0, val) != 1)
		return reprobe_needed;

	if (val & HDMI_LINK_STATUS_CHANGED)
		intel_dp_handle_hdmi_link_status_change(intel_dp);

	return reprobe_needed;
}

/*
 * According to DP spec
 * 5.1.2:
 * 1. Read DPCD
 * 2. Configure link according to Receiver Capabilities
 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3
 * 4. Check link status on receipt of hot-plug interrupt
 *
 * intel_dp_short_pulse - handles short pulse interrupts
 * when full detection is not required.
 * Returns %true if short pulse is handled and full detection
 * is NOT required and %false otherwise.
 */
static bool
intel_dp_short_pulse(struct intel_dp *intel_dp)
{
	u8 old_sink_count = intel_dp->sink_count;
	bool reprobe_needed = false;
	bool ret;

	intel_dp_test_reset(intel_dp);

	/*
	 * Now read the DPCD to see if it's actually running
	 * If the current value of sink count doesn't match with
	 * the value that was stored earlier or dpcd read failed
	 * we need to do full detection
	 */
	ret = intel_dp_get_dpcd(intel_dp);

	if ((old_sink_count != intel_dp->sink_count) || !ret) {
		/* No need to proceed if we are going to do full detect */
		return false;
	}

	intel_dp_check_device_service_irq(intel_dp);
	reprobe_needed = intel_dp_check_link_service_irq(intel_dp);

	/* Handle CEC interrupts, if any */
	drm_dp_cec_irq(&intel_dp->aux);

	intel_dp_check_link_state(intel_dp);

	intel_psr_short_pulse(intel_dp);

	/* An ALPM error means the sink-side ALPM is broken; stop using it. */
	if (intel_alpm_get_error(intel_dp)) {
		intel_alpm_disable(intel_dp);
		intel_dp->alpm.sink_alpm_error = true;
	}

	if (intel_dp_test_short_pulse(intel_dp))
		reprobe_needed = true;

	return !reprobe_needed;
}

/* XXX this is probably wrong for multiple downstream ports */
static enum drm_connector_status
intel_dp_detect_dpcd(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	u8 *dpcd = intel_dp->dpcd;
	u8 type;

	if (drm_WARN_ON(display->drm, intel_dp_is_edp(intel_dp)))
		return connector_status_connected;

	intel_lspcon_resume(dig_port);

	if (!intel_dp_get_dpcd(intel_dp))
		return connector_status_disconnected;

	intel_dp->mst_detect = intel_dp_mst_detect(intel_dp);

	/* if there's no downstream port, we're done */
	if (!drm_dp_is_branch(dpcd))
		return connector_status_connected;

	/* If we're HPD-aware, SINK_COUNT changes dynamically */
	if (intel_dp_has_sink_count(intel_dp) &&
	    intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) {
		return intel_dp->sink_count ?
			connector_status_connected : connector_status_disconnected;
	}

	if (intel_dp->mst_detect == DRM_DP_MST)
		return connector_status_connected;

	/* If no HPD, poke DDC gently */
	if (drm_probe_ddc(&intel_dp->aux.ddc))
		return connector_status_connected;

	/* Well we tried, say unknown for unreliable port types */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) {
		type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK;
		if (type == DP_DS_PORT_TYPE_VGA ||
		    type == DP_DS_PORT_TYPE_NON_EDID)
			return connector_status_unknown;
	} else {
		type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] &
			DP_DWN_STRM_PORT_TYPE_MASK;
		if (type == DP_DWN_STRM_PORT_TYPE_ANALOG ||
		    type == DP_DWN_STRM_PORT_TYPE_OTHER)
			return connector_status_unknown;
	}

	/* Anything else is out of spec, warn and ignore */
	drm_dbg_kms(display->drm, "Broken DP branch device, ignoring\n");
	return connector_status_disconnected;
}

/* eDP panels are hardwired; always report connected. */
static enum drm_connector_status
edp_detect(struct intel_dp *intel_dp)
{
	return connector_status_connected;
}

/* Take the port's HPD/detection lock, if the port provides one. */
void intel_digital_port_lock(struct intel_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

	if (dig_port->lock)
		dig_port->lock(dig_port);
}

/* Release the port's HPD/detection lock, if the port provides one. */
void intel_digital_port_unlock(struct intel_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

	if (dig_port->unlock)
		dig_port->unlock(dig_port);
}

/*
 * intel_digital_port_connected_locked - is the specified port connected?
 * @encoder: intel_encoder
 *
 * In cases where there's a connector physically connected but it can't be used
 * by our hardware we also return false, since the rest of the driver should
 * pretty much treat the port as disconnected. This is relevant for type-C
 * (starting on ICL) where there's ownership involved.
 *
 * The caller must hold the lock acquired by calling intel_digital_port_lock()
 * when calling this function.
 *
 * Return %true if port is connected, %false otherwise.
 */
bool intel_digital_port_connected_locked(struct intel_encoder *encoder)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_glitch_free = intel_tc_port_handles_hpd_glitches(dig_port);
	bool is_connected = false;

	/* Poll to ride out HPD glitches on ports that don't filter them. */
	with_intel_display_power(display, POWER_DOMAIN_DISPLAY_CORE) {
		poll_timeout_us(is_connected = dig_port->connected(encoder),
				is_connected || is_glitch_free,
				30, 4000, false);
	}

	return is_connected;
}

/* Locked wrapper around intel_digital_port_connected_locked(). */
bool intel_digital_port_connected(struct intel_encoder *encoder)
{
	bool ret;

	intel_digital_port_lock(encoder);
	ret = intel_digital_port_connected_locked(encoder);
	intel_digital_port_unlock(encoder);

	return ret;
}

/*
 * Return the EDID for the attached sink: a duplicate of the panel's fixed
 * EDID when one exists (NULL if that fixed EDID is marked invalid),
 * otherwise a fresh DDC read. Caller owns the returned drm_edid.
 */
static const struct drm_edid *
intel_dp_get_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_edid *fixed_edid = connector->panel.fixed_edid;

	/* Use panel fixed edid if we have one */
	if (fixed_edid) {
		/* invalid edid */
		if (IS_ERR(fixed_edid))
			return NULL;

		return drm_edid_dup(fixed_edid);
	}

	return drm_edid_read_ddc(&connector->base, &intel_dp->aux.ddc);
}

/*
 * Refresh the cached downstream facing port (DFP) capabilities — max bpc,
 * dotclock, TMDS clock range and PCON FRL bandwidth — from the DPCD and the
 * (possibly NULL) sink EDID, then re-read the PCON DSC caps.
 */
static void
intel_dp_update_dfp(struct intel_dp *intel_dp,
		    const struct drm_edid *drm_edid)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	intel_dp->dfp.max_bpc =
		drm_dp_downstream_max_bpc(intel_dp->dpcd,
					  intel_dp->downstream_ports, drm_edid);

	intel_dp->dfp.max_dotclock =
		drm_dp_downstream_max_dotclock(intel_dp->dpcd,
					       intel_dp->downstream_ports);

	intel_dp->dfp.min_tmds_clock =
		drm_dp_downstream_min_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 drm_edid);
	intel_dp->dfp.max_tmds_clock =
		drm_dp_downstream_max_tmds_clock(intel_dp->dpcd,
						 intel_dp->downstream_ports,
						 drm_edid);

	intel_dp->dfp.pcon_max_frl_bw =
		drm_dp_get_pcon_max_frl_bw(intel_dp->dpcd,
					   intel_dp->downstream_ports);

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] DFP max bpc %d, max dotclock %d, TMDS clock %d-%d, PCON Max FRL BW %dGbps\n",
		    connector->base.base.id, connector->base.name,
		    intel_dp->dfp.max_bpc,
		    intel_dp->dfp.max_dotclock,
		    intel_dp->dfp.min_tmds_clock,
		    intel_dp->dfp.max_tmds_clock,
		    intel_dp->dfp.pcon_max_frl_bw);

	intel_dp_get_pcon_dsc_cap(intel_dp);
}

/*
 * Can this connector output YCbCr 4:2:0, either natively (source outputs
 * 4:2:0 and the branch passes it through) or via a DFP conversion from RGB
 * or YCbCr 4:4:4?
 */
static bool
intel_dp_can_ycbcr420(struct intel_dp *intel_dp)
{
	if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420) &&
	    (!drm_dp_is_branch(intel_dp->dpcd) || intel_dp->dfp.ycbcr420_passthrough))
		return true;

	if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_RGB) &&
	    dfp_can_convert_from_rgb(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420))
		return true;

	if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR444) &&
	    dfp_can_convert_from_ycbcr444(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420))
		return true;

	return false;
}

/*
 * Refresh the cached DFP 4:2:0 pass-through/conversion capabilities and
 * update the connector's ycbcr_420_allowed flag accordingly.
 */
static void
intel_dp_update_420(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	intel_dp->dfp.ycbcr420_passthrough =
		drm_dp_downstream_420_passthrough(intel_dp->dpcd,
						  intel_dp->downstream_ports);
	/* on-board LSPCON always assumed to support 4:4:4->4:2:0 conversion */
	intel_dp->dfp.ycbcr_444_to_420 =
		intel_lspcon_active(dp_to_dig_port(intel_dp)) ||
		drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd,
							intel_dp->downstream_ports);
	intel_dp->dfp.rgb_to_ycbcr =
		drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd,
							  intel_dp->downstream_ports,
							  DP_DS_HDMI_BT709_RGB_YCBCR_CONV);

	connector->base.ycbcr_420_allowed = intel_dp_can_ycbcr420(intel_dp);

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] RGB->YcbCr conversion? %s, YCbCr 4:2:0 allowed? %s, YCbCr 4:4:4->4:2:0 conversion? %s\n",
		    connector->base.base.id, connector->base.name,
		    str_yes_no(intel_dp->dfp.rgb_to_ycbcr),
		    str_yes_no(connector->base.ycbcr_420_allowed),
		    str_yes_no(intel_dp->dfp.ycbcr_444_to_420));
}

/*
 * Read and cache the sink's EDID, then propagate everything derived from
 * it: connector display info, VRR capability, DFP caps, 4:2:0 caps and CEC.
 */
static void
intel_dp_set_edid(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_edid *drm_edid;
	bool vrr_capable;

	intel_dp_unset_edid(intel_dp);
	drm_edid = intel_dp_get_edid(intel_dp);
	connector->detect_edid = drm_edid;

	/* Below we depend on display info having been updated */
	drm_edid_connector_update(&connector->base, drm_edid);

	vrr_capable = intel_vrr_is_capable(connector);
	drm_dbg_kms(display->drm, "[CONNECTOR:%d:%s] VRR capable: %s\n",
		    connector->base.base.id, connector->base.name, str_yes_no(vrr_capable));
	drm_connector_set_vrr_capable_property(&connector->base, vrr_capable);

	intel_dp_update_dfp(intel_dp, drm_edid);
	intel_dp_update_420(intel_dp);

	drm_dp_cec_attach(&intel_dp->aux,
			  connector->base.display_info.source_physical_address);
}

/*
 * Drop the cached EDID and reset all EDID-derived state (DFP caps, 4:2:0
 * flags, VRR property, CEC).
 */
static void
intel_dp_unset_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	drm_dp_cec_unset_edid(&intel_dp->aux);
	drm_edid_free(connector->detect_edid);
	connector->detect_edid = NULL;

	intel_dp->dfp.max_bpc = 0;
	intel_dp->dfp.max_dotclock = 0;
	intel_dp->dfp.min_tmds_clock = 0;
	intel_dp->dfp.max_tmds_clock = 0;

	intel_dp->dfp.pcon_max_frl_bw = 0;

	intel_dp->dfp.ycbcr_444_to_420 = false;
	connector->base.ycbcr_420_allowed = false;

	drm_connector_set_vrr_capable_property(&connector->base,
					       false);
}

/* Cache whether both source and sink support the Adaptive-Sync SDP. */
static void
intel_dp_detect_sdp_caps(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	intel_dp->as_sdp_supported = HAS_AS_SDP(display) &&
		drm_dp_as_sdp_supported(&intel_dp->aux, intel_dp->dpcd);
}

/*
 * Should DPCD accesses be preceded by a probe read? Never for eDP; always
 * when forced for external sinks; otherwise only for non-MST sinks whose
 * EDID carries the DP_DPCD_PROBE quirk.
 */
static bool intel_dp_needs_dpcd_probe(struct intel_dp *intel_dp, bool force_on_external)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	if (intel_dp_is_edp(intel_dp))
		return false;

	if (force_on_external)
		return true;

	if (intel_dp->is_mst)
		return false;

	return drm_edid_has_quirk(&connector->base, DRM_EDID_QUIRK_DP_DPCD_PROBE);
}

/* Apply the DPCD-probe policy from intel_dp_needs_dpcd_probe() to the AUX ch. */
void intel_dp_dpcd_set_probe(struct intel_dp *intel_dp, bool force_on_external)
{
	drm_dp_dpcd_set_probe(&intel_dp->aux,
			      intel_dp_needs_dpcd_probe(intel_dp, force_on_external));
}

/*
 * Full connector detection: probe the port/DPCD, handle MST/tunnel state,
 * refresh sink caps and EDID, and return the resulting connector status
 * (or -EDEADLK for modeset-lock backoff).
 */
static int
intel_dp_detect(struct drm_connector *_connector,
		struct drm_modeset_acquire_ctx *ctx,
		bool force)
{
	struct intel_display *display = to_intel_display(_connector->dev);
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	enum drm_connector_status status;
	int ret;

	drm_dbg_kms(display->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.base.id, connector->base.name);
	drm_WARN_ON(display->drm,
		    !drm_modeset_is_locked(&display->drm->mode_config.connection_mutex));

	if (!intel_display_device_enabled(display))
		return connector_status_disconnected;

	if (!intel_display_driver_check_access(display))
		return connector->base.status;

	intel_dp_flush_connector_commits(connector);

	intel_pps_vdd_on(intel_dp);

	/* Can't disconnect eDP */
	if (intel_dp_is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(encoder))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	if (status != connector_status_disconnected &&
	    !intel_dp_mst_verify_dpcd_state(intel_dp))
		/*
		 * This requires retrying detection for instance to re-enable
		 * the MST mode that got reset via a long HPD pulse. The retry
		 * will happen either via the hotplug handler's retry logic,
		 * ensured by setting the connector here to SST/disconnected,
		 * or via a userspace connector probing in response to the
		 * hotplug uevent sent when removing the MST connectors.
		 */
		status = connector_status_disconnected;

	if (status == connector_status_disconnected) {
		intel_dp_test_reset(intel_dp);
		/*
		 * FIXME: Resetting these caps here cause
		 * state computation fail if the connector need to be
		 * modeset after sink disconnect. Move resetting them
		 * to where new sink is connected.
		 */
		memset(connector->dp.dsc_dpcd, 0, sizeof(connector->dp.dsc_dpcd));
		memset(connector->dp.panel_replay_caps.dpcd, 0,
		       sizeof(connector->dp.panel_replay_caps.dpcd));
		intel_dp->psr.sink_panel_replay_support = false;
		connector->dp.panel_replay_caps.support = false;
		connector->dp.panel_replay_caps.su_support = false;
		connector->dp.panel_replay_caps.dsc_support =
			INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED;

		intel_dp_mst_disconnect(intel_dp);

		intel_dp_tunnel_disconnect(intel_dp);

		goto out_unset_edid;
	}

	intel_dp_init_source_oui(intel_dp);

	ret = intel_dp_tunnel_detect(intel_dp, ctx);
	if (ret == -EDEADLK) {
		status = ret;

		goto out_vdd_off;
	}

	/* A new tunnel was detected: bump the epoch so userspace reprobes. */
	if (ret == 1)
		connector->base.epoch_counter++;

	if (!intel_dp_is_edp(intel_dp))
		intel_psr_init_dpcd(intel_dp, connector);

	intel_dp_detect_dsc_caps(intel_dp, connector);

	intel_dp_detect_sdp_caps(intel_dp);

	if (intel_dp->reset_link_params) {
		intel_dp_reset_link_params(intel_dp);
		intel_dp->reset_link_params = false;
	}

	intel_dp_mst_configure(intel_dp);

	intel_dp_print_rates(intel_dp);

	if (intel_dp->is_mst) {
		/*
		 * If we are in MST mode then this connector
		 * won't appear connected or have anything
		 * with EDID on it
		 */
		status = connector_status_disconnected;
		goto out_unset_edid;
	}

	/*
	 * Some external monitors do not signal loss of link synchronization
	 * with an IRQ_HPD, so force a link status check.
	 *
	 * TODO: this probably became redundant, so remove it: the link state
	 * is rechecked/recovered now after modesets, where the loss of
	 * synchronization tends to occur.
	 */
	if (!intel_dp_is_edp(intel_dp))
		intel_dp_check_link_state(intel_dp);

	/*
	 * Clearing NACK and defer counts to get their exact values
	 * while reading EDID which are required by Compliance tests
	 * 4.2.2.4 and 4.2.2.5
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);
	if (intel_dp_is_edp(intel_dp) || connector->detect_edid)
		status = connector_status_connected;

	intel_dp_check_device_service_irq(intel_dp);

out_unset_edid:
	if (status != connector_status_connected && !intel_dp->is_mst)
		intel_dp_unset_edid(intel_dp);

	intel_dp_dpcd_set_probe(intel_dp, false);

	if (!intel_dp_is_edp(intel_dp))
		drm_dp_set_subconnector_property(&connector->base,
						 status,
						 intel_dp->dpcd,
						 intel_dp->downstream_ports);
out_vdd_off:
	intel_pps_vdd_off(intel_dp);

	return status;
}

/*
 * Forced detection path (no port probe): refresh the EDID for a connector
 * already believed connected.
 */
static void
intel_dp_force(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	drm_dbg_kms(display->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.base.id, connector->base.name);

	if (!intel_display_driver_check_access(display))
		return;

	intel_dp_unset_edid(intel_dp);

	if (connector->base.status != connector_status_connected)
		return;

	intel_dp_set_edid(intel_dp);

	intel_dp_dpcd_set_probe(intel_dp, false);
}

static int intel_dp_get_modes(struct drm_connector *_connector)
{
	struct intel_display *display = to_intel_display(_connector->dev);
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int num_modes;

	/*
drm_edid_connector_update() done in ->detect() or ->force() */ 6225 num_modes = drm_edid_connector_add_modes(&connector->base); 6226 6227 /* Also add fixed mode, which may or may not be present in EDID */ 6228 if (intel_dp_is_edp(intel_dp)) 6229 num_modes += intel_panel_get_modes(connector); 6230 6231 if (num_modes) 6232 return num_modes; 6233 6234 if (!connector->detect_edid) { 6235 struct drm_display_mode *mode; 6236 6237 mode = drm_dp_downstream_mode(display->drm, 6238 intel_dp->dpcd, 6239 intel_dp->downstream_ports); 6240 if (mode) { 6241 drm_mode_probed_add(&connector->base, mode); 6242 num_modes++; 6243 } 6244 } 6245 6246 return num_modes; 6247 } 6248 6249 static int 6250 intel_dp_connector_register(struct drm_connector *_connector) 6251 { 6252 struct intel_connector *connector = to_intel_connector(_connector); 6253 struct intel_display *display = to_intel_display(connector); 6254 struct intel_dp *intel_dp = intel_attached_dp(connector); 6255 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 6256 int ret; 6257 6258 ret = intel_connector_register(&connector->base); 6259 if (ret) 6260 return ret; 6261 6262 drm_dbg_kms(display->drm, "registering %s bus for %s\n", 6263 intel_dp->aux.name, connector->base.kdev->kobj.name); 6264 6265 intel_dp->aux.dev = connector->base.kdev; 6266 ret = drm_dp_aux_register(&intel_dp->aux); 6267 if (!ret) 6268 drm_dp_cec_register_connector(&intel_dp->aux, &connector->base); 6269 6270 if (!intel_bios_encoder_is_lspcon(dig_port->base.devdata)) 6271 return ret; 6272 6273 /* 6274 * ToDo: Clean this up to handle lspcon init and resume more 6275 * efficiently and streamlined. 
6276 */ 6277 if (intel_lspcon_init(dig_port)) { 6278 if (intel_lspcon_detect_hdr_capability(dig_port)) 6279 drm_connector_attach_hdr_output_metadata_property(&connector->base); 6280 } 6281 6282 return ret; 6283 } 6284 6285 static void 6286 intel_dp_connector_unregister(struct drm_connector *_connector) 6287 { 6288 struct intel_connector *connector = to_intel_connector(_connector); 6289 struct intel_dp *intel_dp = intel_attached_dp(connector); 6290 6291 drm_dp_cec_unregister_connector(&intel_dp->aux); 6292 drm_dp_aux_unregister(&intel_dp->aux); 6293 intel_connector_unregister(&connector->base); 6294 } 6295 6296 void intel_dp_connector_sync_state(struct intel_connector *connector, 6297 const struct intel_crtc_state *crtc_state) 6298 { 6299 struct intel_display *display = to_intel_display(connector); 6300 6301 if (crtc_state && crtc_state->dsc.compression_enable) { 6302 drm_WARN_ON(display->drm, 6303 !connector->dp.dsc_decompression_aux); 6304 connector->dp.dsc_decompression_enabled = true; 6305 } else { 6306 connector->dp.dsc_decompression_enabled = false; 6307 } 6308 } 6309 6310 void intel_dp_encoder_flush_work(struct drm_encoder *_encoder) 6311 { 6312 struct intel_encoder *encoder = to_intel_encoder(_encoder); 6313 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 6314 struct intel_dp *intel_dp = &dig_port->dp; 6315 6316 intel_encoder_link_check_flush_work(encoder); 6317 6318 intel_dp_mst_encoder_cleanup(dig_port); 6319 6320 intel_dp_tunnel_destroy(intel_dp); 6321 6322 intel_pps_vdd_off_sync(intel_dp); 6323 6324 /* 6325 * Ensure power off delay is respected on module remove, so that we can 6326 * reduce delays at driver probe. See pps_init_timestamps(). 
6327 */ 6328 intel_pps_wait_power_cycle(intel_dp); 6329 6330 intel_dp_aux_fini(intel_dp); 6331 } 6332 6333 void intel_dp_encoder_suspend(struct intel_encoder *encoder) 6334 { 6335 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 6336 6337 intel_pps_vdd_off_sync(intel_dp); 6338 6339 intel_dp_tunnel_suspend(intel_dp); 6340 } 6341 6342 void intel_dp_encoder_shutdown(struct intel_encoder *encoder) 6343 { 6344 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 6345 6346 intel_pps_wait_power_cycle(intel_dp); 6347 } 6348 6349 static int intel_modeset_tile_group(struct intel_atomic_state *state, 6350 int tile_group_id) 6351 { 6352 struct intel_display *display = to_intel_display(state); 6353 struct drm_connector_list_iter conn_iter; 6354 struct intel_connector *connector; 6355 int ret = 0; 6356 6357 drm_connector_list_iter_begin(display->drm, &conn_iter); 6358 for_each_intel_connector_iter(connector, &conn_iter) { 6359 struct drm_connector_state *conn_state; 6360 struct intel_crtc_state *crtc_state; 6361 struct intel_crtc *crtc; 6362 6363 if (!connector->base.has_tile || 6364 connector->base.tile_group->id != tile_group_id) 6365 continue; 6366 6367 conn_state = drm_atomic_get_connector_state(&state->base, 6368 &connector->base); 6369 if (IS_ERR(conn_state)) { 6370 ret = PTR_ERR(conn_state); 6371 break; 6372 } 6373 6374 crtc = to_intel_crtc(conn_state->crtc); 6375 6376 if (!crtc) 6377 continue; 6378 6379 crtc_state = intel_atomic_get_new_crtc_state(state, crtc); 6380 crtc_state->uapi.mode_changed = true; 6381 6382 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base); 6383 if (ret) 6384 break; 6385 } 6386 drm_connector_list_iter_end(&conn_iter); 6387 6388 return ret; 6389 } 6390 6391 static int intel_modeset_affected_transcoders(struct intel_atomic_state *state, u8 transcoders) 6392 { 6393 struct intel_display *display = to_intel_display(state); 6394 struct intel_crtc *crtc; 6395 6396 if (transcoders == 0) 6397 return 0; 6398 6399 
for_each_intel_crtc(display->drm, crtc) { 6400 struct intel_crtc_state *crtc_state; 6401 int ret; 6402 6403 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc); 6404 if (IS_ERR(crtc_state)) 6405 return PTR_ERR(crtc_state); 6406 6407 if (!crtc_state->hw.enable) 6408 continue; 6409 6410 if (!(transcoders & BIT(crtc_state->cpu_transcoder))) 6411 continue; 6412 6413 crtc_state->uapi.mode_changed = true; 6414 6415 ret = drm_atomic_add_affected_connectors(&state->base, &crtc->base); 6416 if (ret) 6417 return ret; 6418 6419 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base); 6420 if (ret) 6421 return ret; 6422 6423 transcoders &= ~BIT(crtc_state->cpu_transcoder); 6424 } 6425 6426 drm_WARN_ON(display->drm, transcoders != 0); 6427 6428 return 0; 6429 } 6430 6431 static int intel_modeset_synced_crtcs(struct intel_atomic_state *state, 6432 struct drm_connector *_connector) 6433 { 6434 struct intel_connector *connector = to_intel_connector(_connector); 6435 const struct drm_connector_state *old_conn_state = 6436 drm_atomic_get_old_connector_state(&state->base, &connector->base); 6437 const struct intel_crtc_state *old_crtc_state; 6438 struct intel_crtc *crtc; 6439 u8 transcoders; 6440 6441 crtc = to_intel_crtc(old_conn_state->crtc); 6442 if (!crtc) 6443 return 0; 6444 6445 old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc); 6446 6447 if (!old_crtc_state->hw.active) 6448 return 0; 6449 6450 transcoders = old_crtc_state->sync_mode_slaves_mask; 6451 if (old_crtc_state->master_transcoder != INVALID_TRANSCODER) 6452 transcoders |= BIT(old_crtc_state->master_transcoder); 6453 6454 return intel_modeset_affected_transcoders(state, 6455 transcoders); 6456 } 6457 6458 static int intel_dp_connector_atomic_check(struct drm_connector *_connector, 6459 struct drm_atomic_state *_state) 6460 { 6461 struct intel_connector *connector = to_intel_connector(_connector); 6462 struct intel_display *display = to_intel_display(connector); 6463 struct intel_atomic_state 
*state = to_intel_atomic_state(_state); 6464 struct drm_connector_state *conn_state = 6465 drm_atomic_get_new_connector_state(_state, &connector->base); 6466 struct intel_dp *intel_dp = enc_to_intel_dp(connector->encoder); 6467 int ret; 6468 6469 ret = intel_digital_connector_atomic_check(&connector->base, &state->base); 6470 if (ret) 6471 return ret; 6472 6473 if (intel_dp_mst_source_support(intel_dp)) { 6474 ret = drm_dp_mst_root_conn_atomic_check(conn_state, &intel_dp->mst.mgr); 6475 if (ret) 6476 return ret; 6477 } 6478 6479 if (!intel_connector_needs_modeset(state, &connector->base)) 6480 return 0; 6481 6482 ret = intel_dp_tunnel_atomic_check_state(state, 6483 intel_dp, 6484 connector); 6485 if (ret) 6486 return ret; 6487 6488 /* 6489 * We don't enable port sync on BDW due to missing w/as and 6490 * due to not having adjusted the modeset sequence appropriately. 6491 */ 6492 if (DISPLAY_VER(display) < 9) 6493 return 0; 6494 6495 if (connector->base.has_tile) { 6496 ret = intel_modeset_tile_group(state, connector->base.tile_group->id); 6497 if (ret) 6498 return ret; 6499 } 6500 6501 return intel_modeset_synced_crtcs(state, &connector->base); 6502 } 6503 6504 static void intel_dp_oob_hotplug_event(struct drm_connector *_connector, 6505 enum drm_connector_status hpd_state) 6506 { 6507 struct intel_connector *connector = to_intel_connector(_connector); 6508 struct intel_display *display = to_intel_display(connector); 6509 struct intel_encoder *encoder = intel_attached_encoder(connector); 6510 bool hpd_high = hpd_state == connector_status_connected; 6511 unsigned int hpd_pin = encoder->hpd_pin; 6512 bool need_work = false; 6513 6514 spin_lock_irq(&display->irq.lock); 6515 if (hpd_high != test_bit(hpd_pin, &display->hotplug.oob_hotplug_last_state)) { 6516 display->hotplug.event_bits |= BIT(hpd_pin); 6517 6518 __assign_bit(hpd_pin, 6519 &display->hotplug.oob_hotplug_last_state, 6520 hpd_high); 6521 need_work = true; 6522 } 6523 spin_unlock_irq(&display->irq.lock); 
6524 6525 if (need_work) 6526 intel_hpd_schedule_detection(display); 6527 } 6528 6529 static const struct drm_connector_funcs intel_dp_connector_funcs = { 6530 .force = intel_dp_force, 6531 .fill_modes = drm_helper_probe_single_connector_modes, 6532 .atomic_get_property = intel_digital_connector_atomic_get_property, 6533 .atomic_set_property = intel_digital_connector_atomic_set_property, 6534 .late_register = intel_dp_connector_register, 6535 .early_unregister = intel_dp_connector_unregister, 6536 .destroy = intel_connector_destroy, 6537 .atomic_destroy_state = drm_atomic_helper_connector_destroy_state, 6538 .atomic_duplicate_state = intel_digital_connector_duplicate_state, 6539 .oob_hotplug_event = intel_dp_oob_hotplug_event, 6540 }; 6541 6542 static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = { 6543 .detect_ctx = intel_dp_detect, 6544 .get_modes = intel_dp_get_modes, 6545 .mode_valid = intel_dp_mode_valid, 6546 .atomic_check = intel_dp_connector_atomic_check, 6547 }; 6548 6549 enum irqreturn 6550 intel_dp_hpd_pulse(struct intel_digital_port *dig_port, bool long_hpd) 6551 { 6552 struct intel_display *display = to_intel_display(dig_port); 6553 struct intel_dp *intel_dp = &dig_port->dp; 6554 u8 dpcd[DP_RECEIVER_CAP_SIZE]; 6555 6556 if (dig_port->base.type == INTEL_OUTPUT_EDP && 6557 (long_hpd || 6558 intel_display_rpm_suspended(display) || 6559 !intel_pps_have_panel_power_or_vdd(intel_dp))) { 6560 /* 6561 * vdd off can generate a long/short pulse on eDP which 6562 * would require vdd on to handle it, and thus we 6563 * would end up in an endless cycle of 6564 * "vdd off -> long/short hpd -> vdd on -> detect -> vdd off -> ..." 6565 */ 6566 drm_dbg_kms(display->drm, 6567 "ignoring %s hpd on eDP [ENCODER:%d:%s]\n", 6568 long_hpd ? 
"long" : "short", 6569 dig_port->base.base.base.id, 6570 dig_port->base.base.name); 6571 return IRQ_HANDLED; 6572 } 6573 6574 drm_dbg_kms(display->drm, "got hpd irq on [ENCODER:%d:%s] - %s\n", 6575 dig_port->base.base.base.id, 6576 dig_port->base.base.name, 6577 long_hpd ? "long" : "short"); 6578 6579 /* 6580 * TBT DP tunnels require the GFX driver to read out the DPRX caps in 6581 * response to long HPD pulses. The DP hotplug handler does that, 6582 * however the hotplug handler may be blocked by another 6583 * connector's/encoder's hotplug handler. Since the TBT CM may not 6584 * complete the DP tunnel BW request for the latter connector/encoder 6585 * waiting for this encoder's DPRX read, perform a dummy read here. 6586 */ 6587 if (long_hpd) { 6588 intel_dp_dpcd_set_probe(intel_dp, true); 6589 6590 intel_dp_read_dprx_caps(intel_dp, dpcd); 6591 6592 intel_dp->reset_link_params = true; 6593 intel_dp_invalidate_source_oui(intel_dp); 6594 6595 return IRQ_NONE; 6596 } 6597 6598 if (intel_dp->is_mst) { 6599 if (!intel_dp_check_mst_status(intel_dp)) 6600 return IRQ_NONE; 6601 } else if (!intel_dp_short_pulse(intel_dp)) { 6602 return IRQ_NONE; 6603 } 6604 6605 return IRQ_HANDLED; 6606 } 6607 6608 static bool _intel_dp_is_port_edp(struct intel_display *display, 6609 const struct intel_bios_encoder_data *devdata, 6610 enum port port) 6611 { 6612 /* 6613 * eDP not supported on g4x. so bail out early just 6614 * for a bit extra safety in case the VBT is bonkers. 
6615 */ 6616 if (DISPLAY_VER(display) < 5) 6617 return false; 6618 6619 if (DISPLAY_VER(display) < 9 && port == PORT_A) 6620 return true; 6621 6622 return devdata && intel_bios_encoder_supports_edp(devdata); 6623 } 6624 6625 bool intel_dp_is_port_edp(struct intel_display *display, enum port port) 6626 { 6627 const struct intel_bios_encoder_data *devdata = 6628 intel_bios_encoder_data_lookup(display, port); 6629 6630 return _intel_dp_is_port_edp(display, devdata, port); 6631 } 6632 6633 bool 6634 intel_dp_has_gamut_metadata_dip(struct intel_encoder *encoder) 6635 { 6636 struct intel_display *display = to_intel_display(encoder); 6637 enum port port = encoder->port; 6638 6639 if (intel_bios_encoder_is_lspcon(encoder->devdata)) 6640 return false; 6641 6642 if (DISPLAY_VER(display) >= 11) 6643 return true; 6644 6645 if (port == PORT_A) 6646 return false; 6647 6648 if (display->platform.haswell || display->platform.broadwell || 6649 DISPLAY_VER(display) >= 9) 6650 return true; 6651 6652 return false; 6653 } 6654 6655 static void 6656 intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *_connector) 6657 { 6658 struct intel_connector *connector = to_intel_connector(_connector); 6659 struct intel_display *display = to_intel_display(intel_dp); 6660 enum port port = dp_to_dig_port(intel_dp)->base.port; 6661 6662 if (!intel_dp_is_edp(intel_dp)) 6663 drm_connector_attach_dp_subconnector_property(&connector->base); 6664 6665 if (!display->platform.g4x && port != PORT_A) 6666 intel_attach_force_audio_property(&connector->base); 6667 6668 intel_attach_broadcast_rgb_property(&connector->base); 6669 if (HAS_GMCH(display)) 6670 drm_connector_attach_max_bpc_property(&connector->base, 6, 10); 6671 else if (DISPLAY_VER(display) >= 5) 6672 drm_connector_attach_max_bpc_property(&connector->base, 6, 12); 6673 6674 /* Register HDMI colorspace for case of lspcon */ 6675 if (intel_bios_encoder_is_lspcon(dp_to_dig_port(intel_dp)->base.devdata)) { 6676 
drm_connector_attach_content_type_property(&connector->base); 6677 intel_attach_hdmi_colorspace_property(&connector->base); 6678 } else { 6679 intel_attach_dp_colorspace_property(&connector->base); 6680 } 6681 6682 if (intel_dp_has_gamut_metadata_dip(&dp_to_dig_port(intel_dp)->base)) 6683 drm_connector_attach_hdr_output_metadata_property(&connector->base); 6684 6685 if (HAS_VRR(display)) 6686 drm_connector_attach_vrr_capable_property(&connector->base); 6687 } 6688 6689 static void 6690 intel_edp_add_properties(struct intel_dp *intel_dp) 6691 { 6692 struct intel_display *display = to_intel_display(intel_dp); 6693 struct intel_connector *connector = intel_dp->attached_connector; 6694 const struct drm_display_mode *fixed_mode = 6695 intel_panel_preferred_fixed_mode(connector); 6696 6697 intel_attach_scaling_mode_property(&connector->base); 6698 6699 drm_connector_set_panel_orientation_with_quirk(&connector->base, 6700 display->vbt.orientation, 6701 fixed_mode->hdisplay, 6702 fixed_mode->vdisplay); 6703 } 6704 6705 static void intel_edp_backlight_setup(struct intel_dp *intel_dp, 6706 struct intel_connector *connector) 6707 { 6708 struct intel_display *display = to_intel_display(intel_dp); 6709 enum pipe pipe = INVALID_PIPE; 6710 6711 if (display->platform.valleyview || display->platform.cherryview) 6712 pipe = vlv_pps_backlight_initial_pipe(intel_dp); 6713 6714 intel_backlight_setup(connector, pipe); 6715 } 6716 6717 static bool intel_edp_init_connector(struct intel_dp *intel_dp, 6718 struct intel_connector *connector) 6719 { 6720 struct intel_display *display = to_intel_display(intel_dp); 6721 struct drm_display_mode *fixed_mode; 6722 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 6723 bool has_dpcd; 6724 const struct drm_edid *drm_edid; 6725 6726 if (!intel_dp_is_edp(intel_dp)) 6727 return true; 6728 6729 /* 6730 * On IBX/CPT we may get here with LVDS already registered. 
Since the 6731 * driver uses the only internal power sequencer available for both 6732 * eDP and LVDS bail out early in this case to prevent interfering 6733 * with an already powered-on LVDS power sequencer. 6734 */ 6735 if (intel_get_lvds_encoder(display)) { 6736 drm_WARN_ON(display->drm, 6737 !(HAS_PCH_IBX(display) || HAS_PCH_CPT(display))); 6738 drm_info(display->drm, 6739 "LVDS was detected, not registering eDP\n"); 6740 6741 return false; 6742 } 6743 6744 intel_bios_init_panel_early(display, &connector->panel, 6745 encoder->devdata); 6746 6747 if (!intel_pps_init(intel_dp)) { 6748 drm_info(display->drm, 6749 "[ENCODER:%d:%s] unusable PPS, disabling eDP\n", 6750 encoder->base.base.id, encoder->base.name); 6751 /* 6752 * The BIOS may have still enabled VDD on the PPS even 6753 * though it's unusable. Make sure we turn it back off 6754 * and to release the power domain references/etc. 6755 */ 6756 goto out_vdd_off; 6757 } 6758 6759 /* 6760 * Enable HPD sense for live status check. 6761 * intel_hpd_irq_setup() will turn it off again 6762 * if it's no longer needed later. 6763 * 6764 * The DPCD probe below will make sure VDD is on. 6765 */ 6766 intel_hpd_enable_detection(encoder); 6767 6768 intel_alpm_init(intel_dp); 6769 6770 /* Cache DPCD and EDID for edp. */ 6771 has_dpcd = intel_edp_init_dpcd(intel_dp, connector); 6772 6773 if (!has_dpcd) { 6774 /* if this fails, presume the device is a ghost */ 6775 drm_info(display->drm, 6776 "[ENCODER:%d:%s] failed to retrieve link info, disabling eDP\n", 6777 encoder->base.base.id, encoder->base.name); 6778 goto out_vdd_off; 6779 } 6780 6781 /* 6782 * VBT and straps are liars. Also check HPD as that seems 6783 * to be the most reliable piece of information available. 6784 * 6785 * ... expect on devices that forgot to hook HPD up for eDP 6786 * (eg. Acer Chromebook C710), so we'll check it only if multiple 6787 * ports are attempting to use the same AUX CH, according to VBT. 
6788 */ 6789 if (intel_bios_dp_has_shared_aux_ch(encoder->devdata)) { 6790 /* 6791 * If this fails, presume the DPCD answer came 6792 * from some other port using the same AUX CH. 6793 * 6794 * FIXME maybe cleaner to check this before the 6795 * DPCD read? Would need sort out the VDD handling... 6796 */ 6797 if (!intel_digital_port_connected(encoder)) { 6798 drm_info(display->drm, 6799 "[ENCODER:%d:%s] HPD is down, disabling eDP\n", 6800 encoder->base.base.id, encoder->base.name); 6801 goto out_vdd_off; 6802 } 6803 6804 /* 6805 * Unfortunately even the HPD based detection fails on 6806 * eg. Asus B360M-A (CFL+CNP), so as a last resort fall 6807 * back to checking for a VGA branch device. Only do this 6808 * on known affected platforms to minimize false positives. 6809 */ 6810 if (DISPLAY_VER(display) == 9 && drm_dp_is_branch(intel_dp->dpcd) && 6811 (intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_TYPE_MASK) == 6812 DP_DWN_STRM_PORT_TYPE_ANALOG) { 6813 drm_info(display->drm, 6814 "[ENCODER:%d:%s] VGA converter detected, disabling eDP\n", 6815 encoder->base.base.id, encoder->base.name); 6816 goto out_vdd_off; 6817 } 6818 } 6819 6820 mutex_lock(&display->drm->mode_config.mutex); 6821 drm_edid = drm_edid_read_ddc(&connector->base, connector->base.ddc); 6822 if (!drm_edid) { 6823 /* Fallback to EDID from ACPI OpRegion, if any */ 6824 drm_edid = intel_opregion_get_edid(connector); 6825 if (drm_edid) 6826 drm_dbg_kms(display->drm, 6827 "[CONNECTOR:%d:%s] Using OpRegion EDID\n", 6828 connector->base.base.id, connector->base.name); 6829 } 6830 if (drm_edid) { 6831 if (drm_edid_connector_update(&connector->base, drm_edid) || 6832 !drm_edid_connector_add_modes(&connector->base)) { 6833 drm_edid_connector_update(&connector->base, NULL); 6834 drm_edid_free(drm_edid); 6835 drm_edid = ERR_PTR(-EINVAL); 6836 } 6837 } else { 6838 drm_edid = ERR_PTR(-ENOENT); 6839 } 6840 6841 intel_bios_init_panel_late(display, &connector->panel, encoder->devdata, 6842 IS_ERR(drm_edid) 
? NULL : drm_edid); 6843 6844 intel_panel_add_edid_fixed_modes(connector, true); 6845 6846 /* MSO requires information from the EDID */ 6847 intel_edp_mso_init(intel_dp); 6848 6849 /* multiply the mode clock and horizontal timings for MSO */ 6850 list_for_each_entry(fixed_mode, &connector->panel.fixed_modes, head) 6851 intel_edp_mso_mode_fixup(connector, fixed_mode); 6852 6853 /* fallback to VBT if available for eDP */ 6854 if (!intel_panel_preferred_fixed_mode(connector)) 6855 intel_panel_add_vbt_lfp_fixed_mode(connector); 6856 6857 mutex_unlock(&display->drm->mode_config.mutex); 6858 6859 if (!intel_panel_preferred_fixed_mode(connector)) { 6860 drm_info(display->drm, 6861 "[ENCODER:%d:%s] failed to find fixed mode for the panel, disabling eDP\n", 6862 encoder->base.base.id, encoder->base.name); 6863 goto out_vdd_off; 6864 } 6865 6866 intel_panel_init(connector, drm_edid); 6867 6868 intel_edp_backlight_setup(intel_dp, connector); 6869 6870 intel_edp_add_properties(intel_dp); 6871 6872 intel_pps_init_late(intel_dp); 6873 6874 return true; 6875 6876 out_vdd_off: 6877 intel_pps_vdd_off_sync(intel_dp); 6878 intel_bios_fini_panel(&connector->panel); 6879 6880 return false; 6881 } 6882 6883 bool 6884 intel_dp_init_connector(struct intel_digital_port *dig_port, 6885 struct intel_connector *connector) 6886 { 6887 struct intel_display *display = to_intel_display(dig_port); 6888 struct intel_dp *intel_dp = &dig_port->dp; 6889 struct intel_encoder *encoder = &dig_port->base; 6890 struct drm_device *dev = encoder->base.dev; 6891 enum port port = encoder->port; 6892 int type; 6893 6894 if (drm_WARN(dev, dig_port->max_lanes < 1, 6895 "Not enough lanes (%d) for DP on [ENCODER:%d:%s]\n", 6896 dig_port->max_lanes, encoder->base.base.id, 6897 encoder->base.name)) 6898 return false; 6899 6900 intel_dp->reset_link_params = true; 6901 6902 /* Preserve the current hw state. 
*/ 6903 intel_dp->DP = intel_de_read(display, intel_dp->output_reg); 6904 intel_dp->attached_connector = connector; 6905 6906 if (_intel_dp_is_port_edp(display, encoder->devdata, port)) { 6907 /* 6908 * Currently we don't support eDP on TypeC ports for DISPLAY_VER < 30, 6909 * although in theory it could work on TypeC legacy ports. 6910 */ 6911 drm_WARN_ON(dev, intel_encoder_is_tc(encoder) && 6912 DISPLAY_VER(display) < 30); 6913 type = DRM_MODE_CONNECTOR_eDP; 6914 encoder->type = INTEL_OUTPUT_EDP; 6915 6916 /* eDP only on port B and/or C on vlv/chv */ 6917 if (drm_WARN_ON(dev, (display->platform.valleyview || 6918 display->platform.cherryview) && 6919 port != PORT_B && port != PORT_C)) 6920 return false; 6921 } else { 6922 type = DRM_MODE_CONNECTOR_DisplayPort; 6923 } 6924 6925 intel_dp_set_default_sink_rates(intel_dp); 6926 intel_dp_set_default_max_sink_lane_count(intel_dp); 6927 6928 if (display->platform.valleyview || display->platform.cherryview) 6929 vlv_pps_pipe_init(intel_dp); 6930 6931 intel_dp_aux_init(intel_dp); 6932 connector->dp.dsc_decompression_aux = &intel_dp->aux; 6933 6934 drm_dbg_kms(display->drm, 6935 "Adding %s connector on [ENCODER:%d:%s]\n", 6936 type == DRM_MODE_CONNECTOR_eDP ? 
"eDP" : "DP", 6937 encoder->base.base.id, encoder->base.name); 6938 6939 drm_connector_init_with_ddc(dev, &connector->base, &intel_dp_connector_funcs, 6940 type, &intel_dp->aux.ddc); 6941 drm_connector_helper_add(&connector->base, &intel_dp_connector_helper_funcs); 6942 6943 if (!HAS_GMCH(display) && DISPLAY_VER(display) < 12) 6944 connector->base.interlace_allowed = true; 6945 6946 if (type != DRM_MODE_CONNECTOR_eDP) 6947 connector->polled = DRM_CONNECTOR_POLL_HPD; 6948 connector->base.polled = connector->polled; 6949 6950 intel_connector_attach_encoder(connector, encoder); 6951 6952 if (HAS_DDI(display)) 6953 connector->get_hw_state = intel_ddi_connector_get_hw_state; 6954 else 6955 connector->get_hw_state = intel_connector_get_hw_state; 6956 connector->sync_state = intel_dp_connector_sync_state; 6957 6958 if (!intel_edp_init_connector(intel_dp, connector)) { 6959 intel_dp_aux_fini(intel_dp); 6960 goto fail; 6961 } 6962 6963 intel_dp_set_source_rates(intel_dp); 6964 intel_dp_set_common_rates(intel_dp); 6965 intel_dp_reset_link_params(intel_dp); 6966 6967 /* init MST on ports that can support it */ 6968 intel_dp_mst_encoder_init(dig_port, connector->base.base.id); 6969 6970 intel_dp_add_properties(intel_dp, &connector->base); 6971 6972 if (is_hdcp_supported(display, port) && !intel_dp_is_edp(intel_dp)) { 6973 int ret = intel_dp_hdcp_init(dig_port, connector); 6974 if (ret) 6975 drm_dbg_kms(display->drm, 6976 "HDCP init failed, skipping.\n"); 6977 } 6978 6979 intel_dp->frl.is_trained = false; 6980 intel_dp->frl.trained_rate_gbps = 0; 6981 6982 intel_psr_init(intel_dp); 6983 6984 return true; 6985 6986 fail: 6987 intel_display_power_flush_work(display); 6988 drm_connector_cleanup(&connector->base); 6989 6990 return false; 6991 } 6992 6993 void intel_dp_mst_suspend(struct intel_display *display) 6994 { 6995 struct intel_encoder *encoder; 6996 6997 if (!HAS_DISPLAY(display)) 6998 return; 6999 7000 for_each_intel_encoder(display->drm, encoder) { 7001 struct intel_dp 
*intel_dp; 7002 7003 if (encoder->type != INTEL_OUTPUT_DDI) 7004 continue; 7005 7006 intel_dp = enc_to_intel_dp(encoder); 7007 7008 if (!intel_dp_mst_source_support(intel_dp)) 7009 continue; 7010 7011 if (intel_dp->is_mst) 7012 drm_dp_mst_topology_mgr_suspend(&intel_dp->mst.mgr); 7013 } 7014 } 7015 7016 void intel_dp_mst_resume(struct intel_display *display) 7017 { 7018 struct intel_encoder *encoder; 7019 7020 if (!HAS_DISPLAY(display)) 7021 return; 7022 7023 for_each_intel_encoder(display->drm, encoder) { 7024 struct intel_dp *intel_dp; 7025 int ret; 7026 7027 if (encoder->type != INTEL_OUTPUT_DDI) 7028 continue; 7029 7030 intel_dp = enc_to_intel_dp(encoder); 7031 7032 if (!intel_dp_mst_source_support(intel_dp)) 7033 continue; 7034 7035 ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst.mgr, true); 7036 if (ret) { 7037 intel_dp->is_mst = false; 7038 drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, false); 7039 } 7040 } 7041 } 7042 7043 static 7044 int intel_dp_sdp_compute_config_late(struct intel_crtc_state *crtc_state) 7045 { 7046 struct intel_display *display = to_intel_display(crtc_state); 7047 int guardband = intel_crtc_vblank_length(crtc_state); 7048 int min_sdp_guardband = intel_dp_sdp_min_guardband(crtc_state, false); 7049 7050 if (guardband < min_sdp_guardband) { 7051 drm_dbg_kms(display->drm, "guardband %d < min sdp guardband %d\n", 7052 guardband, min_sdp_guardband); 7053 return -EINVAL; 7054 } 7055 7056 return 0; 7057 } 7058 7059 int intel_dp_compute_config_late(struct intel_encoder *encoder, 7060 struct intel_crtc_state *crtc_state, 7061 struct drm_connector_state *conn_state) 7062 { 7063 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 7064 int ret; 7065 7066 intel_psr_compute_config_late(intel_dp, crtc_state); 7067 7068 ret = intel_dp_sdp_compute_config_late(crtc_state); 7069 if (ret) 7070 return ret; 7071 7072 return 0; 7073 } 7074 7075 static 7076 int intel_dp_get_lines_for_sdp(const struct intel_crtc_state *crtc_state, u32 type) 7077 { 
7078 switch (type) { 7079 case DP_SDP_VSC_EXT_VESA: 7080 case DP_SDP_VSC_EXT_CEA: 7081 return 10; 7082 case HDMI_PACKET_TYPE_GAMUT_METADATA: 7083 return 8; 7084 case DP_SDP_PPS: 7085 return 7; 7086 case DP_SDP_ADAPTIVE_SYNC: 7087 return crtc_state->vrr.vsync_start + 1; 7088 default: 7089 break; 7090 } 7091 7092 return 0; 7093 } 7094 7095 int intel_dp_sdp_min_guardband(const struct intel_crtc_state *crtc_state, 7096 bool assume_all_enabled) 7097 { 7098 struct intel_display *display = to_intel_display(crtc_state); 7099 int sdp_guardband = 0; 7100 7101 if (assume_all_enabled || 7102 crtc_state->infoframes.enable & 7103 intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA)) 7104 sdp_guardband = max(sdp_guardband, 7105 intel_dp_get_lines_for_sdp(crtc_state, 7106 HDMI_PACKET_TYPE_GAMUT_METADATA)); 7107 7108 if (assume_all_enabled || 7109 crtc_state->dsc.compression_enable) 7110 sdp_guardband = max(sdp_guardband, 7111 intel_dp_get_lines_for_sdp(crtc_state, DP_SDP_PPS)); 7112 7113 if ((assume_all_enabled && HAS_AS_SDP(display)) || 7114 crtc_state->infoframes.enable & intel_hdmi_infoframe_enable(DP_SDP_ADAPTIVE_SYNC)) 7115 sdp_guardband = max(sdp_guardband, 7116 intel_dp_get_lines_for_sdp(crtc_state, DP_SDP_ADAPTIVE_SYNC)); 7117 7118 return sdp_guardband; 7119 } 7120