1 /* 2 * Copyright © 2008 Intel Corporation 3 * 4 * Permission is hereby granted, free of charge, to any person obtaining a 5 * copy of this software and associated documentation files (the "Software"), 6 * to deal in the Software without restriction, including without limitation 7 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 8 * and/or sell copies of the Software, and to permit persons to whom the 9 * Software is furnished to do so, subject to the following conditions: 10 * 11 * The above copyright notice and this permission notice (including the next 12 * paragraph) shall be included in all copies or substantial portions of the 13 * Software. 14 * 15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 21 * IN THE SOFTWARE. 
22 * 23 * Authors: 24 * Keith Packard <keithp@keithp.com> 25 * 26 */ 27 28 #include <linux/export.h> 29 #include <linux/i2c.h> 30 #include <linux/iopoll.h> 31 #include <linux/log2.h> 32 #include <linux/math.h> 33 #include <linux/notifier.h> 34 #include <linux/seq_buf.h> 35 #include <linux/slab.h> 36 #include <linux/sort.h> 37 #include <linux/string_helpers.h> 38 #include <linux/timekeeping.h> 39 #include <linux/types.h> 40 #include <asm/byteorder.h> 41 42 #include <drm/display/drm_dp_helper.h> 43 #include <drm/display/drm_dp_tunnel.h> 44 #include <drm/display/drm_dsc_helper.h> 45 #include <drm/display/drm_hdmi_helper.h> 46 #include <drm/drm_atomic_helper.h> 47 #include <drm/drm_crtc.h> 48 #include <drm/drm_edid.h> 49 #include <drm/drm_fixed.h> 50 #include <drm/drm_print.h> 51 #include <drm/drm_probe_helper.h> 52 53 #include "g4x_dp.h" 54 #include "intel_alpm.h" 55 #include "intel_atomic.h" 56 #include "intel_audio.h" 57 #include "intel_backlight.h" 58 #include "intel_combo_phy_regs.h" 59 #include "intel_connector.h" 60 #include "intel_crtc.h" 61 #include "intel_crtc_state_dump.h" 62 #include "intel_cx0_phy.h" 63 #include "intel_ddi.h" 64 #include "intel_de.h" 65 #include "intel_display_driver.h" 66 #include "intel_display_jiffies.h" 67 #include "intel_display_utils.h" 68 #include "intel_display_regs.h" 69 #include "intel_display_rpm.h" 70 #include "intel_display_types.h" 71 #include "intel_dp.h" 72 #include "intel_dp_aux.h" 73 #include "intel_dp_hdcp.h" 74 #include "intel_dp_link_training.h" 75 #include "intel_dp_mst.h" 76 #include "intel_dp_test.h" 77 #include "intel_dp_tunnel.h" 78 #include "intel_dpio_phy.h" 79 #include "intel_dpll.h" 80 #include "intel_drrs.h" 81 #include "intel_encoder.h" 82 #include "intel_fifo_underrun.h" 83 #include "intel_hdcp.h" 84 #include "intel_hdmi.h" 85 #include "intel_hotplug.h" 86 #include "intel_hotplug_irq.h" 87 #include "intel_lspcon.h" 88 #include "intel_lvds.h" 89 #include "intel_modeset_lock.h" 90 #include "intel_panel.h" 91 
#include "intel_pch_display.h"
#include "intel_pfit.h"
#include "intel_pps.h"
#include "intel_psr.h"
#include "intel_quirks.h"
#include "intel_tc.h"
#include "intel_vblank.h"
#include "intel_vdsc.h"
#include "intel_vrr.h"

/* Max DSC line buffer depth supported by HW. */
#define INTEL_DP_DSC_MAX_LINE_BUF_DEPTH 13

/* DP DSC FEC Overhead factor in ppm = 1/(0.972261) = 1.028530 */
#define DP_DSC_FEC_OVERHEAD_FACTOR 1028530

/* Constants for DP DSC configurations */
static const u8 valid_dsc_bpp[] = {6, 8, 10, 12, 15};

/*
 * With Single pipe configuration, HW is capable of supporting maximum of:
 * 2 slices per line for ICL, BMG
 * 4 slices per line for other platforms.
 * For now consider a max of 2 slices per line, which works for all platforms.
 * With this we can have max of 4 DSC Slices per pipe.
 *
 * For higher resolutions where 12 slice support is required with
 * ultrajoiner, only then each pipe can support 3 slices.
 *
 * #TODO Split this better to use 4 slices/dsc engine where supported.
 */
static const u8 valid_dsc_slicecount[] = {1, 2, 3, 4};

/**
 * intel_dp_is_edp - is the given port attached to an eDP panel (either CPU or PCH)
 * @intel_dp: DP struct
 *
 * If a CPU or PCH DP output is attached to an eDP panel, this function
 * will return true, and false otherwise.
 *
 * This function is not safe to use prior to encoder type being set.
 */
bool intel_dp_is_edp(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

	return dig_port->base.type == INTEL_OUTPUT_EDP;
}

/* Forward declaration; defined later in this file. */
static void intel_dp_unset_edid(struct intel_dp *intel_dp);

/* Is link rate UHBR and thus 128b/132b?
*/
bool intel_dp_is_uhbr(const struct intel_crtc_state *crtc_state)
{
	return drm_dp_is_uhbr_rate(crtc_state->port_clock);
}

/**
 * intel_dp_link_symbol_size - get the link symbol size for a given link rate
 * @rate: link rate in 10kbit/s units
 *
 * Returns the link symbol size in bits/symbol units depending on the link
 * rate -> channel coding.
 */
int intel_dp_link_symbol_size(int rate)
{
	return drm_dp_is_uhbr_rate(rate) ? 32 : 10;
}

/**
 * intel_dp_link_symbol_clock - convert link rate to link symbol clock
 * @rate: link rate in 10kbit/s units
 *
 * Returns the link symbol clock frequency in kHz units depending on the
 * link rate and channel coding.
 */
int intel_dp_link_symbol_clock(int rate)
{
	return DIV_ROUND_CLOSEST(rate * 10, intel_dp_link_symbol_size(rate));
}

/*
 * Max link rate supported by the DPRX, taken either from the DP tunnel BW
 * manager (when tunnel BW allocation is enabled) or from DPCD, and capped
 * for quirked eDP panels.
 */
static int max_dprx_rate(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int max_rate;

	if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
		max_rate = drm_dp_tunnel_max_dprx_rate(intel_dp->tunnel);
	else
		max_rate = drm_dp_bw_code_to_link_rate(intel_dp->dpcd[DP_MAX_LINK_RATE]);

	/*
	 * Some platforms + eDP panels may not reliably support HBR3
	 * due to signal integrity limitations, despite advertising it.
	 * Cap the link rate to HBR2 to avoid unstable configurations for the
	 * known machines.
	 */
	if (intel_dp_is_edp(intel_dp) && intel_has_quirk(display, QUIRK_EDP_LIMIT_RATE_HBR2))
		max_rate = min(max_rate, 540000);

	return max_rate;
}

/* Max lane count supported by the DPRX (or the DP tunnel, if one is active). */
static int max_dprx_lane_count(struct intel_dp *intel_dp)
{
	if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
		return drm_dp_tunnel_max_dprx_lane_count(intel_dp->tunnel);

	return drm_dp_max_lane_count(intel_dp->dpcd);
}

/* Fall back to the lowest (RBR, 162000) rate as the only sink rate. */
static void intel_dp_set_default_sink_rates(struct intel_dp *intel_dp)
{
	intel_dp->sink_rates[0] = 162000;
	intel_dp->num_sink_rates = 1;
}

/* update sink rates from dpcd */
static void intel_dp_set_dpcd_sink_rates(struct intel_dp *intel_dp)
{
	static const int dp_rates[] = {
		162000, 270000, 540000, 810000
	};
	int i, max_rate;
	int max_lttpr_rate;

	if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_CAN_DO_MAX_LINK_RATE_3_24_GBPS)) {
		/* Needed, e.g., for Apple MBP 2017, 15 inch eDP Retina panel */
		static const int quirk_rates[] = { 162000, 270000, 324000 };

		memcpy(intel_dp->sink_rates, quirk_rates, sizeof(quirk_rates));
		intel_dp->num_sink_rates = ARRAY_SIZE(quirk_rates);

		return;
	}

	/*
	 * Sink rates for 8b/10b.
	 */
	max_rate = max_dprx_rate(intel_dp);
	max_lttpr_rate = drm_dp_lttpr_max_link_rate(intel_dp->lttpr_common_caps);
	if (max_lttpr_rate)
		max_rate = min(max_rate, max_lttpr_rate);

	for (i = 0; i < ARRAY_SIZE(dp_rates); i++) {
		if (dp_rates[i] > max_rate)
			break;
		intel_dp->sink_rates[i] = dp_rates[i];
	}

	/*
	 * Sink rates for 128b/132b. If set, sink should support all 8b/10b
	 * rates and 10 Gbps.
	 */
	if (drm_dp_128b132b_supported(intel_dp->dpcd)) {
		u8 uhbr_rates = 0;

		BUILD_BUG_ON(ARRAY_SIZE(intel_dp->sink_rates) < ARRAY_SIZE(dp_rates) + 3);

		drm_dp_dpcd_readb(&intel_dp->aux,
				  DP_128B132B_SUPPORTED_LINK_RATES, &uhbr_rates);

		if (drm_dp_lttpr_count(intel_dp->lttpr_common_caps)) {
			/* We have a repeater */
			if (intel_dp->lttpr_common_caps[0] >= 0x20 &&
			    intel_dp->lttpr_common_caps[DP_MAIN_LINK_CHANNEL_CODING_PHY_REPEATER -
							DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV] &
			    DP_PHY_REPEATER_128B132B_SUPPORTED) {
				/* Repeater supports 128b/132b, valid UHBR rates */
				uhbr_rates &= intel_dp->lttpr_common_caps[DP_PHY_REPEATER_128B132B_RATES -
									  DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
			} else {
				/* Does not support 128b/132b */
				uhbr_rates = 0;
			}
		}

		if (uhbr_rates & DP_UHBR10)
			intel_dp->sink_rates[i++] = 1000000;
		if (uhbr_rates & DP_UHBR13_5)
			intel_dp->sink_rates[i++] = 1350000;
		if (uhbr_rates & DP_UHBR20)
			intel_dp->sink_rates[i++] = 2000000;
	}

	intel_dp->num_sink_rates = i;
}

/*
 * Read the sink rates from DPCD, falling back to a safe default if the DPCD
 * advertised no valid rate at all.
 */
static void intel_dp_set_sink_rates(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;

	intel_dp_set_dpcd_sink_rates(intel_dp);

	if (intel_dp->num_sink_rates)
		return;

	drm_err(display->drm,
		"[CONNECTOR:%d:%s][ENCODER:%d:%s] Invalid DPCD with no link rates, using defaults\n",
		connector->base.base.id, connector->base.name,
		encoder->base.base.id, encoder->base.name);

	intel_dp_set_default_sink_rates(intel_dp);
}

/* Single lane fallback for an invalid DPCD lane count. */
static void intel_dp_set_default_max_sink_lane_count(struct intel_dp *intel_dp)
{
	intel_dp->max_sink_lane_count = 1;
}

static
void intel_dp_set_max_sink_lane_count(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;

	intel_dp->max_sink_lane_count = max_dprx_lane_count(intel_dp);

	/* Only x1/x2/x4 lane configurations are valid. */
	switch (intel_dp->max_sink_lane_count) {
	case 1:
	case 2:
	case 4:
		return;
	}

	drm_err(display->drm,
		"[CONNECTOR:%d:%s][ENCODER:%d:%s] Invalid DPCD max lane count (%d), using default\n",
		connector->base.base.id, connector->base.name,
		encoder->base.base.id, encoder->base.name,
		intel_dp->max_sink_lane_count);

	intel_dp_set_default_max_sink_lane_count(intel_dp);
}

/* Get length of rates array potentially limited by max_rate. */
static int intel_dp_rate_limit_len(const int *rates, int len, int max_rate)
{
	int i;

	/* Limit results by potentially reduced max rate */
	for (i = 0; i < len; i++) {
		if (rates[len - i - 1] <= max_rate)
			return len - i;
	}

	return 0;
}

/* Get length of common rates array potentially limited by max_rate.
 */
static int intel_dp_common_len_rate_limit(const struct intel_dp *intel_dp,
					  int max_rate)
{
	return intel_dp_rate_limit_len(intel_dp->common_rates,
				       intel_dp->num_common_rates, max_rate);
}

/* Return common_rates[index], or the lowest (RBR) rate on an invalid index. */
int intel_dp_common_rate(struct intel_dp *intel_dp, int index)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (drm_WARN_ON(display->drm,
			index < 0 || index >= intel_dp->num_common_rates))
		return 162000;

	return intel_dp->common_rates[index];
}

/* Theoretical max between source and sink */
int intel_dp_max_common_rate(struct intel_dp *intel_dp)
{
	return intel_dp_common_rate(intel_dp, intel_dp->num_common_rates - 1);
}

/* Max lane count of the source port, possibly limited by the VBT. */
int intel_dp_max_source_lane_count(struct intel_digital_port *dig_port)
{
	int vbt_max_lanes = intel_bios_dp_max_lane_count(dig_port->base.devdata);
	int max_lanes = dig_port->max_lanes;

	if (vbt_max_lanes)
		max_lanes = min(max_lanes, vbt_max_lanes);

	return max_lanes;
}

/* Theoretical max between source and sink */
int intel_dp_max_common_lane_count(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	int source_max = intel_dp_max_source_lane_count(dig_port);
	int sink_max = intel_dp->max_sink_lane_count;
	int lane_max = intel_tc_port_max_lane_count(dig_port);
	int lttpr_max = drm_dp_lttpr_max_lane_count(intel_dp->lttpr_common_caps);

	if (lttpr_max)
		sink_max = min(sink_max, lttpr_max);

	return min3(source_max, sink_max, lane_max);
}

/* Forced lane count clamped to the [1, max common lane count] range. */
static int forced_lane_count(struct intel_dp *intel_dp)
{
	return clamp(intel_dp->link.force_lane_count, 1, intel_dp_max_common_lane_count(intel_dp));
}

/*
 * Max lane count honouring a forced value; anything outside {1, 2, 4}
 * falls back to a single lane.
 */
int intel_dp_max_lane_count(struct intel_dp *intel_dp)
{
	int lane_count;

	if (intel_dp->link.force_lane_count)
		lane_count = forced_lane_count(intel_dp);
	else
		lane_count = intel_dp->link.max_lane_count;

	switch (lane_count) {
	case 1:
	case 2:
	case 4:
		return lane_count;
	default:
		MISSING_CASE(lane_count);
		return 1;
	}
}

/* A forced lane count pins the minimum as well as the maximum. */
static int intel_dp_min_lane_count(struct intel_dp *intel_dp)
{
	if (intel_dp->link.force_lane_count)
		return forced_lane_count(intel_dp);

	return 1;
}

/*
 * Link BW allocation overhead for the given link/stream parameters,
 * presumably in 1 ppm units as consumed by intel_dp_effective_data_rate()
 * - TODO confirm against drm_dp_bw_overhead().
 */
int intel_dp_link_bw_overhead(int link_clock, int lane_count, int hdisplay,
			      int dsc_slice_count, int bpp_x16, unsigned long flags)
{
	int overhead;

	WARN_ON(flags & ~(DRM_DP_BW_OVERHEAD_MST | DRM_DP_BW_OVERHEAD_SSC_REF_CLK |
			  DRM_DP_BW_OVERHEAD_FEC));

	/* UHBR/DSC overheads are implied by the link rate and slice count. */
	if (drm_dp_is_uhbr_rate(link_clock))
		flags |= DRM_DP_BW_OVERHEAD_UHBR;

	if (dsc_slice_count)
		flags |= DRM_DP_BW_OVERHEAD_DSC;

	overhead = drm_dp_bw_overhead(lane_count, hdisplay,
				      dsc_slice_count,
				      bpp_x16,
				      flags);

	/*
	 * TODO: clarify whether a minimum required by the fixed FEC overhead
	 * in the bspec audio programming sequence is required here.
	 */
	return max(overhead, intel_dp_bw_fec_overhead(flags & DRM_DP_BW_OVERHEAD_FEC));
}

/*
 * The required data bandwidth for a mode with given pixel clock and bpp. This
 * is the required net bandwidth independent of the data bandwidth efficiency.
 */
int intel_dp_link_required(int link_clock, int lane_count,
			   int mode_clock, int mode_hdisplay,
			   int link_bpp_x16, unsigned long bw_overhead_flags)
{
	int bw_overhead = intel_dp_link_bw_overhead(link_clock, lane_count, mode_hdisplay,
						    0, link_bpp_x16, bw_overhead_flags);

	return intel_dp_effective_data_rate(mode_clock, link_bpp_x16, bw_overhead);
}

/**
 * intel_dp_effective_data_rate - Return the pixel data rate accounting for BW allocation overhead
 * @pixel_clock: pixel clock in kHz
 * @bpp_x16: bits per pixel .4 fixed point format
 * @bw_overhead: BW allocation overhead in 1ppm units
 *
 * Return the effective pixel data rate in kB/sec units taking into account
 * the provided SSC, FEC, DSC BW allocation overhead.
 */
int intel_dp_effective_data_rate(int pixel_clock, int bpp_x16,
				 int bw_overhead)
{
	return DIV_ROUND_UP_ULL(mul_u32_u32(pixel_clock * bpp_x16, bw_overhead),
				1000000 * 16 * 8);
}

/**
 * intel_dp_max_link_data_rate: Calculate the maximum rate for the given link params
 * @intel_dp: Intel DP object
 * @max_dprx_rate: Maximum data rate of the DPRX
 * @max_dprx_lanes: Maximum lane count of the DPRX
 *
 * Calculate the maximum data rate for the provided link parameters taking into
 * account any BW limitations by a DP tunnel attached to @intel_dp.
 *
 * Returns the maximum data rate in kBps units.
 */
int intel_dp_max_link_data_rate(struct intel_dp *intel_dp,
				int max_dprx_rate, int max_dprx_lanes)
{
	int max_rate = drm_dp_max_dprx_data_rate(max_dprx_rate, max_dprx_lanes);

	if (intel_dp_tunnel_bw_alloc_is_enabled(intel_dp))
		max_rate = min(max_rate,
			       drm_dp_tunnel_available_bw(intel_dp->tunnel));

	return max_rate;
}

/* Can joiner (pipe ganging) be used on this platform/port? */
bool intel_dp_has_joiner(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *intel_dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &intel_dig_port->base;

	/* eDP MSO is not compatible with joiner */
	if (intel_dp->mso_link_count)
		return false;

	return DISPLAY_VER(display) >= 12 ||
		(DISPLAY_VER(display) == 11 &&
		 encoder->port != PORT_A);
}

/* Platform-specific caps on the source link rate, in 10 kbit/s units. */
static int dg2_max_source_rate(struct intel_dp *intel_dp)
{
	return intel_dp_is_edp(intel_dp) ? 810000 : 1350000;
}

static int icl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	if (intel_encoder_is_combo(encoder) && !intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}

static int ehl_max_source_rate(struct intel_dp *intel_dp)
{
	if (intel_dp_is_edp(intel_dp))
		return 540000;

	return 810000;
}

static int mtl_max_source_rate(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;

	if (intel_encoder_is_c10phy(encoder) ||
	    display->platform.pantherlake_wildcatlake)
		return 810000;

	if (DISPLAY_VERx100(display) == 1401)
		return 1350000;

	return 2000000;
}

/*
 * Link rate limit from the VBT, further limited for eDP by the panel's
 * VBT field; 0 means no VBT limit.
 */
static int vbt_max_link_rate(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int max_rate;

	max_rate = intel_bios_dp_max_link_rate(encoder->devdata);

	if (intel_dp_is_edp(intel_dp)) {
		struct intel_connector *connector = intel_dp->attached_connector;
		int edp_max_rate = connector->panel.vbt.edp.max_link_rate;

		if (max_rate && edp_max_rate)
			max_rate = min(max_rate, edp_max_rate);
		else if (edp_max_rate)
			max_rate = edp_max_rate;
	}

	return max_rate;
}

static void
intel_dp_set_source_rates(struct intel_dp *intel_dp)
{
	/* The values must be in increasing order */
	static const int bmg_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000, 675000,
		810000, 1000000, 1350000,
	};
	static const int mtl_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000, 675000,
		810000, 1000000, 2000000,
	};
	static const int icl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000, 648000, 810000,
		1000000, 1350000,
	};
	static const int bxt_rates[] = {
		162000, 216000, 243000, 270000, 324000, 432000, 540000
	};
	static const int skl_rates[] = {
		162000, 216000, 270000, 324000, 432000, 540000
	};
	static const int hsw_rates[] = {
		162000, 270000, 540000
	};
	static const int g4x_rates[] = {
		162000, 270000
	};
	struct intel_display *display = to_intel_display(intel_dp);
	const int *source_rates;
	int size, max_rate = 0, vbt_max_rate;

	/* This should only be done once */
	drm_WARN_ON(display->drm,
		    intel_dp->source_rates || intel_dp->num_source_rates);

	if (DISPLAY_VER(display) >= 14) {
		if (display->platform.battlemage) {
			source_rates = bmg_rates;
			size = ARRAY_SIZE(bmg_rates);
		} else {
			source_rates = mtl_rates;
			size = ARRAY_SIZE(mtl_rates);
		}
		max_rate = mtl_max_source_rate(intel_dp);
	} else if (DISPLAY_VER(display) >= 11) {
		source_rates = icl_rates;
		size = ARRAY_SIZE(icl_rates);
		if (display->platform.dg2)
			max_rate =
dg2_max_source_rate(intel_dp);
		else if (display->platform.alderlake_p || display->platform.alderlake_s ||
			 display->platform.dg1 || display->platform.rocketlake)
			max_rate = 810000;
		else if (display->platform.jasperlake || display->platform.elkhartlake)
			max_rate = ehl_max_source_rate(intel_dp);
		else
			max_rate = icl_max_source_rate(intel_dp);
	} else if (display->platform.geminilake || display->platform.broxton) {
		source_rates = bxt_rates;
		size = ARRAY_SIZE(bxt_rates);
	} else if (DISPLAY_VER(display) == 9) {
		source_rates = skl_rates;
		size = ARRAY_SIZE(skl_rates);
	} else if ((display->platform.haswell && !display->platform.haswell_ulx) ||
		   display->platform.broadwell) {
		source_rates = hsw_rates;
		size = ARRAY_SIZE(hsw_rates);
	} else {
		source_rates = g4x_rates;
		size = ARRAY_SIZE(g4x_rates);
	}

	/* A VBT limit (if any) further trims the platform rate table. */
	vbt_max_rate = vbt_max_link_rate(intel_dp);
	if (max_rate && vbt_max_rate)
		max_rate = min(max_rate, vbt_max_rate);
	else if (vbt_max_rate)
		max_rate = vbt_max_rate;

	if (max_rate)
		size = intel_dp_rate_limit_len(source_rates, size, max_rate);

	intel_dp->source_rates = source_rates;
	intel_dp->num_source_rates = size;
}

/*
 * Intersect two ascending rate arrays into common_rates; returns the number
 * of common entries (at most DP_MAX_SUPPORTED_RATES).
 */
static int intersect_rates(const int *source_rates, int source_len,
			   const int *sink_rates, int sink_len,
			   int *common_rates)
{
	int i = 0, j = 0, k = 0;

	while (i < source_len && j < sink_len) {
		if (source_rates[i] == sink_rates[j]) {
			if (WARN_ON(k >= DP_MAX_SUPPORTED_RATES))
				return k;
			common_rates[k] = source_rates[i];
			++k;
			++i;
			++j;
		} else if (source_rates[i] < sink_rates[j]) {
			++i;
		} else {
			++j;
		}
	}
	return k;
}

/* return index of rate in rates array, or -1 if not found */
int intel_dp_rate_index(const int *rates, int len, int rate)
{
	int i;

	for (i = 0; i < len; i++)
		if (rate == rates[i])
			return i;

	return -1;
}

/* Link rate (from the common rates table) of the given link config. */
static int intel_dp_link_config_rate(struct intel_dp *intel_dp,
				     const struct intel_dp_link_config *lc)
{
	return intel_dp_common_rate(intel_dp, lc->link_rate_idx);
}

/* Lane count of the given link config (stored as a power-of-2 exponent). */
static int intel_dp_link_config_lane_count(const struct intel_dp_link_config *lc)
{
	return 1 << lc->lane_count_exp;
}

/* Max DPRX data rate achievable with the given link config. */
static int intel_dp_link_config_bw(struct intel_dp *intel_dp,
				   const struct intel_dp_link_config *lc)
{
	return drm_dp_max_dprx_data_rate(intel_dp_link_config_rate(intel_dp, lc),
					 intel_dp_link_config_lane_count(lc));
}

/* sort_r() comparator: order configs by BW, breaking ties by link rate. */
static int link_config_cmp_by_bw(const void *a, const void *b, const void *p)
{
	struct intel_dp *intel_dp = (struct intel_dp *)p; /* remove const */
	const struct intel_dp_link_config *lc_a = a;
	const struct intel_dp_link_config *lc_b = b;
	int bw_a = intel_dp_link_config_bw(intel_dp, lc_a);
	int bw_b = intel_dp_link_config_bw(intel_dp, lc_b);

	if (bw_a != bw_b)
		return bw_a - bw_b;

	return intel_dp_link_config_rate(intel_dp, lc_a) -
	       intel_dp_link_config_rate(intel_dp, lc_b);
}

/*
 * Build the table of all (link rate, lane count) combinations, sorted by
 * the bandwidth they provide.
 */
static void intel_dp_link_config_init(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_dp_link_config *lc;
	int num_common_lane_configs;
	int i;
	int j;

	if (drm_WARN_ON(display->drm, !is_power_of_2(intel_dp_max_common_lane_count(intel_dp))))
		return;

	num_common_lane_configs = ilog2(intel_dp_max_common_lane_count(intel_dp)) + 1;

	if (drm_WARN_ON(display->drm, intel_dp->num_common_rates * num_common_lane_configs >
				      ARRAY_SIZE(intel_dp->link.configs)))
		return;

	intel_dp->link.num_configs = intel_dp->num_common_rates * num_common_lane_configs;

	lc = &intel_dp->link.configs[0];
	for (i = 0; i < intel_dp->num_common_rates; i++) {
		for (j = 0; j < num_common_lane_configs; j++) {
			lc->lane_count_exp = j;
			lc->link_rate_idx = i;

			lc++;
		}
	}

	sort_r(intel_dp->link.configs, intel_dp->link.num_configs,
	       sizeof(intel_dp->link.configs[0]),
	       link_config_cmp_by_bw, NULL,
	       intel_dp);
}

/* Look up the link rate / lane count of config @idx (0 used for invalid idx). */
void intel_dp_link_config_get(struct intel_dp *intel_dp, int idx, int *link_rate, int *lane_count)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct intel_dp_link_config *lc;

	if (drm_WARN_ON(display->drm, idx < 0 || idx >= intel_dp->link.num_configs))
		idx = 0;

	lc = &intel_dp->link.configs[idx];

	*link_rate = intel_dp_link_config_rate(intel_dp, lc);
	*lane_count = intel_dp_link_config_lane_count(lc);
}

/* Index of the config matching the given rate/lane count, or -1 if not found. */
int intel_dp_link_config_index(struct intel_dp *intel_dp, int link_rate, int lane_count)
{
	int link_rate_idx = intel_dp_rate_index(intel_dp->common_rates, intel_dp->num_common_rates,
						link_rate);
	int lane_count_exp = ilog2(lane_count);
	int i;

	for (i = 0; i < intel_dp->link.num_configs; i++) {
		const struct intel_dp_link_config *lc = &intel_dp->link.configs[i];

		if (lc->lane_count_exp == lane_count_exp &&
		    lc->link_rate_idx == link_rate_idx)
			return i;
	}

	return -1;
}

/* Compute the source/sink common rates and rebuild the link config table. */
static void intel_dp_set_common_rates(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	drm_WARN_ON(display->drm,
		    !intel_dp->num_source_rates || !intel_dp->num_sink_rates);

	intel_dp->num_common_rates = intersect_rates(intel_dp->source_rates,
						     intel_dp->num_source_rates,
						     intel_dp->sink_rates,
						     intel_dp->num_sink_rates,
						     intel_dp->common_rates);

	/* Paranoia, there should always be something in common.
*/
	if (drm_WARN_ON(display->drm, intel_dp->num_common_rates == 0)) {
		intel_dp->common_rates[0] = 162000;
		intel_dp->num_common_rates = 1;
	}

	intel_dp_link_config_init(intel_dp);
}

bool intel_dp_link_params_valid(struct intel_dp *intel_dp, int link_rate,
				u8 lane_count)
{
	/*
	 * FIXME: we need to synchronize the current link parameters with
	 * hardware readout. Currently fast link training doesn't work on
	 * boot-up.
	 */
	if (link_rate == 0 ||
	    link_rate > intel_dp->link.max_rate)
		return false;

	if (lane_count == 0 ||
	    lane_count > intel_dp_max_lane_count(intel_dp))
		return false;

	return true;
}

/* Scale a mode clock by the fixed DSC FEC overhead factor (ppm). */
u32 intel_dp_mode_to_fec_clock(u32 mode_clock)
{
	return div_u64(mul_u32_u32(mode_clock, DP_DSC_FEC_OVERHEAD_FACTOR),
		       1000000U);
}

/* BW overhead in ppm: the FEC factor when FEC is on, 1.0 (1000000) otherwise. */
int intel_dp_bw_fec_overhead(bool fec_enabled)
{
	/*
	 * TODO: Calculate the actual overhead for a given mode.
	 * The hard-coded 1/0.972261=2.853% overhead factor
	 * corresponds (for instance) to the 8b/10b DP FEC 2.4% +
	 * 0.453% DSC overhead. This is enough for a 3840 width mode,
	 * which has a DSC overhead of up to ~0.2%, but may not be
	 * enough for a 1024 width mode where this is ~0.8% (on a 4
	 * lane DP link, with 2 DSC slices and 8 bpp color depth).
	 */
	return fec_enabled ? DP_DSC_FEC_OVERHEAD_FACTOR : 1000000;
}

/* Small joiner line buffer RAM size in bits, per platform generation. */
static int
small_joiner_ram_size_bits(struct intel_display *display)
{
	if (DISPLAY_VER(display) >= 13)
		return 17280 * 8;
	else if (DISPLAY_VER(display) >= 11)
		return 7680 * 8;
	else
		return 6144 * 8;
}

/*
 * Round a minimum link bpp (.4 fixed point) up to the nearest valid VESA
 * DSC bpp; returns 0 if no valid bpp is high enough.
 */
static int align_min_vesa_compressed_bpp_x16(int min_link_bpp_x16)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(valid_dsc_bpp); i++) {
		int vesa_bpp_x16 = fxp_q4_from_int(valid_dsc_bpp[i]);

		if (vesa_bpp_x16 >= min_link_bpp_x16)
			return vesa_bpp_x16;
	}

	return 0;
}

/*
 * Round a maximum link bpp (.4 fixed point) down to the nearest valid VESA
 * DSC bpp; returns 0 if no valid bpp is low enough.
 */
static int align_max_vesa_compressed_bpp_x16(int max_link_bpp_x16)
{
	int i;

	for (i = ARRAY_SIZE(valid_dsc_bpp) - 1; i >= 0; i--) {
		int vesa_bpp_x16 = fxp_q4_from_int(valid_dsc_bpp[i]);

		if (vesa_bpp_x16 <= max_link_bpp_x16)
			return vesa_bpp_x16;
	}

	return 0;
}

/* Width in bits of the bigjoiner interface, per platform generation. */
static int bigjoiner_interface_bits(struct intel_display *display)
{
	return DISPLAY_VER(display) >= 14 ? 36 : 24;
}

/* Max compressed bpp sustainable by the bigjoiner BW at the given mode clock. */
static u32 bigjoiner_bw_max_bpp(struct intel_display *display, u32 mode_clock,
				int num_joined_pipes)
{
	u32 max_bpp;
	/* With bigjoiner multiple dsc engines are used in parallel so PPC is 2 */
	int ppc = 2;
	int num_big_joiners = num_joined_pipes / 2;

	max_bpp = display->cdclk.max_cdclk_freq * ppc * bigjoiner_interface_bits(display) /
		  intel_dp_mode_to_fec_clock(mode_clock);

	max_bpp *= num_big_joiners;

	return max_bpp;
}

static u32 small_joiner_ram_max_bpp(struct intel_display *display,
				    u32 mode_hdisplay,
				    int num_joined_pipes)
{
	u32 max_bpp;

	/* Small Joiner Check: output bpp <= joiner RAM (bits) / Horiz. width */
	max_bpp = small_joiner_ram_size_bits(display) / mode_hdisplay;

	max_bpp *= num_joined_pipes;

	return max_bpp;
}

/* Ultrajoiner line buffer RAM size in bits. */
static int ultrajoiner_ram_bits(void)
{
	return 4 * 72 * 512;
}

static u32 ultrajoiner_ram_max_bpp(u32 mode_hdisplay)
{
	return ultrajoiner_ram_bits() / mode_hdisplay;
}

/* TODO: return a bpp_x16 value */
static
u32 get_max_compressed_bpp_with_joiner(struct intel_display *display,
				       u32 mode_clock, u32 mode_hdisplay,
				       int num_joined_pipes)
{
	u32 max_bpp = small_joiner_ram_max_bpp(display, mode_hdisplay, num_joined_pipes);

	if (num_joined_pipes > 1)
		max_bpp = min(max_bpp, bigjoiner_bw_max_bpp(display, mode_clock,
							    num_joined_pipes));
	if (num_joined_pipes == 4)
		max_bpp = min(max_bpp, ultrajoiner_ram_max_bpp(mode_hdisplay));

	return max_bpp;
}

/*
 * Pick the smallest valid DSC slice count satisfying the mode clock, line
 * width and sink/branch constraints; returns 0 if none is usable.
 */
u8 intel_dp_dsc_get_slice_count(const struct intel_connector *connector,
				int mode_clock, int mode_hdisplay,
				int num_joined_pipes)
{
	struct intel_display *display = to_intel_display(connector);
	u32 sink_slice_count_mask =
		drm_dp_dsc_sink_slice_count_mask(connector->dp.dsc_dpcd, false);
	u8 min_slice_count, i;
	int max_slice_width;
	int tp_rgb_yuv444;
	int tp_yuv422_420;

	/*
	 * TODO: Use the throughput value specific to the actual RGB/YUV
	 * format of the output.
	 * The RGB/YUV444 throughput value should be always either equal
	 * or smaller than the YUV422/420 value, but let's not depend on
	 * this assumption.
 */
	if (mode_clock > max(connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444,
			     connector->dp.dsc_branch_caps.overall_throughput.yuv422_420))
		return 0;

	if (mode_hdisplay > connector->dp.dsc_branch_caps.max_line_width)
		return 0;

	/*
	 * TODO: Pass the total pixel rate of all the streams transferred to
	 * an MST tiled display, calculate the total slice count for all tiles
	 * from this and the per-tile slice count from the total slice count.
	 */
	tp_rgb_yuv444 = drm_dp_dsc_sink_max_slice_throughput(connector->dp.dsc_dpcd,
							     mode_clock, true);
	tp_yuv422_420 = drm_dp_dsc_sink_max_slice_throughput(connector->dp.dsc_dpcd,
							     mode_clock, false);

	/*
	 * TODO: Use the throughput value specific to the actual RGB/YUV
	 * format of the output.
	 * For now use the smaller of these, which is ok, potentially
	 * resulting in a higher than required minimum slice count.
	 * The RGB/YUV444 throughput value should be always either equal
	 * or smaller than the YUV422/420 value, but let's not depend on
	 * this assumption.
	 */
	min_slice_count = DIV_ROUND_UP(mode_clock, min(tp_rgb_yuv444, tp_yuv422_420));

	/*
	 * Due to some DSC engine BW limitations, we need to enable second
	 * slice and VDSC engine, whenever we approach close enough to max CDCLK
	 */
	if (mode_clock >= ((display->cdclk.max_cdclk_freq * 85) / 100))
		min_slice_count = max_t(u8, min_slice_count, 2);

	max_slice_width = drm_dp_dsc_sink_max_slice_width(connector->dp.dsc_dpcd);
	if (max_slice_width < DP_DSC_MIN_SLICE_WIDTH_VALUE) {
		drm_dbg_kms(display->drm,
			    "Unsupported slice width %d by DP DSC Sink device\n",
			    max_slice_width);
		return 0;
	}
	/* Also take into account max slice width */
	min_slice_count = max_t(u8, min_slice_count,
				DIV_ROUND_UP(mode_hdisplay,
					     max_slice_width));

	/* Find the closest match to the valid slice count values */
	for (i = 0; i < ARRAY_SIZE(valid_dsc_slicecount); i++) {
		u8 test_slice_count = valid_dsc_slicecount[i] * num_joined_pipes;

		/*
		 * 3 DSC Slices per pipe need 3 DSC engines, which is supported only
		 * with Ultrajoiner only for some platforms.
		 */
		if (valid_dsc_slicecount[i] == 3 &&
		    (!HAS_DSC_3ENGINES(display) || num_joined_pipes != 4))
			continue;

		if (!(drm_dp_dsc_slice_count_to_mask(test_slice_count) &
		      sink_slice_count_mask))
			continue;

		/*
		 * Bigjoiner needs small joiner to be enabled.
		 * So there should be at least 2 dsc slices per pipe,
		 * whenever bigjoiner is enabled.
		 */
		if (num_joined_pipes > 1 && valid_dsc_slicecount[i] < 2)
			continue;

		/* Slices must divide the active width evenly. */
		if (mode_hdisplay % test_slice_count)
			continue;

		if (min_slice_count <= test_slice_count)
			return test_slice_count;
	}

	/* Print slice count 1,2,4,..24 if bit#0,1,3,..23 is set in the mask. */
	sink_slice_count_mask <<= 1;
	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] Unsupported slice count (min: %d, sink supported: %*pbl)\n",
		    connector->base.base.id, connector->base.name,
		    min_slice_count,
		    (int)BITS_PER_TYPE(sink_slice_count_mask), &sink_slice_count_mask);

	return 0;
}

/* Can the source hardware output the given pixel format? */
static bool source_can_output(struct intel_dp *intel_dp,
			      enum intel_output_format format)
{
	struct intel_display *display = to_intel_display(intel_dp);

	switch (format) {
	case INTEL_OUTPUT_FORMAT_RGB:
		return true;

	case INTEL_OUTPUT_FORMAT_YCBCR444:
		/*
		 * No YCbCr output support on gmch platforms.
		 * Also, ILK doesn't seem capable of DP YCbCr output.
		 * The displayed image is severely corrupted. SNB+ is fine.
		 */
		return !HAS_GMCH(display) && !display->platform.ironlake;

	case INTEL_OUTPUT_FORMAT_YCBCR420:
		/* Platform < Gen 11 cannot output YCbCr420 format */
		return DISPLAY_VER(display) >= 11;

	default:
		MISSING_CASE(format);
		return false;
	}
}

/* Can the branch device (DFP) convert an RGB stream to @sink_format? */
static bool
dfp_can_convert_from_rgb(struct intel_dp *intel_dp,
			 enum intel_output_format sink_format)
{
	if (!drm_dp_is_branch(intel_dp->dpcd))
		return false;

	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		return intel_dp->dfp.rgb_to_ycbcr;

	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		return intel_dp->dfp.rgb_to_ycbcr &&
			intel_dp->dfp.ycbcr_444_to_420;

	return false;
}

/* Can the branch device (DFP) convert a YCbCr444 stream to @sink_format? */
static bool
dfp_can_convert_from_ycbcr444(struct intel_dp *intel_dp,
			      enum intel_output_format sink_format)
{
	if (!drm_dp_is_branch(intel_dp->dpcd))
		return false;

	if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420)
		return intel_dp->dfp.ycbcr_444_to_420;

	return false;
}

/* Can the branch device convert @output_format to @sink_format? */
static bool
dfp_can_convert(struct intel_dp *intel_dp,
		enum intel_output_format output_format,
		enum intel_output_format sink_format)
{
	switch (output_format) {
	case INTEL_OUTPUT_FORMAT_RGB:
		return dfp_can_convert_from_rgb(intel_dp, sink_format);
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		return dfp_can_convert_from_ycbcr444(intel_dp, sink_format);
	default:
		MISSING_CASE(output_format);
		return false;
	}

	return false;
}

/*
 * Pick the source output format for the given sink format, honouring a
 * forced (debug) DSC output format when it is achievable.
 */
static enum intel_output_format
intel_dp_output_format(struct intel_connector *connector,
		       enum intel_output_format sink_format)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	enum intel_output_format force_dsc_output_format =
		intel_dp->force_dsc_output_format;
	enum intel_output_format output_format;

	if (force_dsc_output_format) {
		if (source_can_output(intel_dp, force_dsc_output_format) &&
		    (!drm_dp_is_branch(intel_dp->dpcd) ||
		     sink_format != force_dsc_output_format ||
		     dfp_can_convert(intel_dp, force_dsc_output_format, sink_format)))
			return force_dsc_output_format;

		drm_dbg_kms(display->drm, "Cannot force DSC output format\n");
	}

	/* Prefer RGB, then YCbCr444, falling back to YCbCr420. */
	if (sink_format == INTEL_OUTPUT_FORMAT_RGB ||
	    dfp_can_convert_from_rgb(intel_dp, sink_format))
		output_format = INTEL_OUTPUT_FORMAT_RGB;

	else if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR444 ||
		 dfp_can_convert_from_ycbcr444(intel_dp, sink_format))
		output_format = INTEL_OUTPUT_FORMAT_YCBCR444;

	else
		output_format = INTEL_OUTPUT_FORMAT_YCBCR420;

	drm_WARN_ON(display->drm, !source_can_output(intel_dp, output_format));

	return output_format;
}

/* Minimum pipe bpp for the given output format. */
int intel_dp_min_bpp(enum intel_output_format output_format)
{
	if (output_format == INTEL_OUTPUT_FORMAT_RGB)
		return intel_display_min_pipe_bpp();
	else
		return 8 * 3;
}

int intel_dp_output_format_link_bpp_x16(enum
intel_output_format output_format, int pipe_bpp) 1189 { 1190 /* 1191 * bpp value was assumed to RGB format. And YCbCr 4:2:0 output 1192 * format of the number of bytes per pixel will be half the number 1193 * of bytes of RGB pixel. 1194 */ 1195 if (output_format == INTEL_OUTPUT_FORMAT_YCBCR420) 1196 pipe_bpp /= 2; 1197 1198 return fxp_q4_from_int(pipe_bpp); 1199 } 1200 1201 static enum intel_output_format 1202 intel_dp_sink_format(struct intel_connector *connector, 1203 const struct drm_display_mode *mode) 1204 { 1205 const struct drm_display_info *info = &connector->base.display_info; 1206 1207 if (drm_mode_is_420_only(info, mode)) 1208 return INTEL_OUTPUT_FORMAT_YCBCR420; 1209 1210 return INTEL_OUTPUT_FORMAT_RGB; 1211 } 1212 1213 static int 1214 intel_dp_mode_min_link_bpp_x16(struct intel_connector *connector, 1215 const struct drm_display_mode *mode) 1216 { 1217 enum intel_output_format output_format, sink_format; 1218 1219 sink_format = intel_dp_sink_format(connector, mode); 1220 1221 output_format = intel_dp_output_format(connector, sink_format); 1222 1223 return intel_dp_output_format_link_bpp_x16(output_format, 1224 intel_dp_min_bpp(output_format)); 1225 } 1226 1227 static bool intel_dp_hdisplay_bad(struct intel_display *display, 1228 int hdisplay) 1229 { 1230 /* 1231 * Older platforms don't like hdisplay==4096 with DP. 1232 * 1233 * On ILK/SNB/IVB the pipe seems to be somewhat running (scanline 1234 * and frame counter increment), but we don't get vblank interrupts, 1235 * and the pipe underruns immediately. The link also doesn't seem 1236 * to get trained properly. 1237 * 1238 * On CHV the vblank interrupts don't seem to disappear but 1239 * otherwise the symptoms are similar. 
1240 * 1241 * TODO: confirm the behaviour on HSW+ 1242 */ 1243 return hdisplay == 4096 && !HAS_DDI(display); 1244 } 1245 1246 static int intel_dp_max_tmds_clock(struct intel_dp *intel_dp) 1247 { 1248 struct intel_connector *connector = intel_dp->attached_connector; 1249 const struct drm_display_info *info = &connector->base.display_info; 1250 int max_tmds_clock = intel_dp->dfp.max_tmds_clock; 1251 1252 /* Only consider the sink's max TMDS clock if we know this is a HDMI DFP */ 1253 if (max_tmds_clock && info->max_tmds_clock) 1254 max_tmds_clock = min(max_tmds_clock, info->max_tmds_clock); 1255 1256 return max_tmds_clock; 1257 } 1258 1259 static enum drm_mode_status 1260 intel_dp_tmds_clock_valid(struct intel_dp *intel_dp, 1261 int clock, int bpc, 1262 enum intel_output_format sink_format, 1263 bool respect_downstream_limits) 1264 { 1265 int tmds_clock, min_tmds_clock, max_tmds_clock; 1266 1267 if (!respect_downstream_limits) 1268 return MODE_OK; 1269 1270 tmds_clock = intel_hdmi_tmds_clock(clock, bpc, sink_format); 1271 1272 min_tmds_clock = intel_dp->dfp.min_tmds_clock; 1273 max_tmds_clock = intel_dp_max_tmds_clock(intel_dp); 1274 1275 if (min_tmds_clock && tmds_clock < min_tmds_clock) 1276 return MODE_CLOCK_LOW; 1277 1278 if (max_tmds_clock && tmds_clock > max_tmds_clock) 1279 return MODE_CLOCK_HIGH; 1280 1281 return MODE_OK; 1282 } 1283 1284 static enum drm_mode_status 1285 intel_dp_mode_valid_downstream(struct intel_connector *connector, 1286 const struct drm_display_mode *mode, 1287 int target_clock) 1288 { 1289 struct intel_dp *intel_dp = intel_attached_dp(connector); 1290 const struct drm_display_info *info = &connector->base.display_info; 1291 enum drm_mode_status status; 1292 enum intel_output_format sink_format; 1293 1294 /* If PCON supports FRL MODE, check FRL bandwidth constraints */ 1295 if (intel_dp->dfp.pcon_max_frl_bw) { 1296 int link_bpp_x16 = intel_dp_mode_min_link_bpp_x16(connector, mode); 1297 int target_bw; 1298 int max_frl_bw; 1299 1300 
target_bw = fxp_q4_to_int_roundup(link_bpp_x16) * target_clock; 1301 1302 max_frl_bw = intel_dp->dfp.pcon_max_frl_bw; 1303 1304 /* converting bw from Gbps to Kbps*/ 1305 max_frl_bw = max_frl_bw * 1000000; 1306 1307 if (target_bw > max_frl_bw) 1308 return MODE_CLOCK_HIGH; 1309 1310 return MODE_OK; 1311 } 1312 1313 if (intel_dp->dfp.max_dotclock && 1314 target_clock > intel_dp->dfp.max_dotclock) 1315 return MODE_CLOCK_HIGH; 1316 1317 sink_format = intel_dp_sink_format(connector, mode); 1318 1319 /* Assume 8bpc for the DP++/HDMI/DVI TMDS clock check */ 1320 status = intel_dp_tmds_clock_valid(intel_dp, target_clock, 1321 8, sink_format, true); 1322 1323 if (status != MODE_OK) { 1324 if (sink_format == INTEL_OUTPUT_FORMAT_YCBCR420 || 1325 !connector->base.ycbcr_420_allowed || 1326 !drm_mode_is_420_also(info, mode)) 1327 return status; 1328 sink_format = INTEL_OUTPUT_FORMAT_YCBCR420; 1329 status = intel_dp_tmds_clock_valid(intel_dp, target_clock, 1330 8, sink_format, true); 1331 if (status != MODE_OK) 1332 return status; 1333 } 1334 1335 return MODE_OK; 1336 } 1337 1338 static 1339 bool intel_dp_needs_joiner(struct intel_dp *intel_dp, 1340 struct intel_connector *connector, 1341 int hdisplay, int clock, 1342 int num_joined_pipes) 1343 { 1344 struct intel_display *display = to_intel_display(intel_dp); 1345 int hdisplay_limit; 1346 1347 if (!intel_dp_has_joiner(intel_dp)) 1348 return false; 1349 1350 num_joined_pipes /= 2; 1351 1352 hdisplay_limit = DISPLAY_VER(display) >= 30 ? 
6144 : 5120; 1353 1354 return clock > num_joined_pipes * display->cdclk.max_dotclk_freq || 1355 hdisplay > num_joined_pipes * hdisplay_limit; 1356 } 1357 1358 int intel_dp_num_joined_pipes(struct intel_dp *intel_dp, 1359 struct intel_connector *connector, 1360 int hdisplay, int clock) 1361 { 1362 struct intel_display *display = to_intel_display(intel_dp); 1363 1364 if (connector->force_joined_pipes) 1365 return connector->force_joined_pipes; 1366 1367 if (HAS_ULTRAJOINER(display) && 1368 intel_dp_needs_joiner(intel_dp, connector, hdisplay, clock, 4)) 1369 return 4; 1370 1371 if ((HAS_BIGJOINER(display) || HAS_UNCOMPRESSED_JOINER(display)) && 1372 intel_dp_needs_joiner(intel_dp, connector, hdisplay, clock, 2)) 1373 return 2; 1374 1375 return 1; 1376 } 1377 1378 bool intel_dp_has_dsc(const struct intel_connector *connector) 1379 { 1380 struct intel_display *display = to_intel_display(connector); 1381 1382 if (!HAS_DSC(display)) 1383 return false; 1384 1385 if (connector->mst.dp && !HAS_DSC_MST(display)) 1386 return false; 1387 1388 if (connector->base.connector_type == DRM_MODE_CONNECTOR_eDP && 1389 connector->panel.vbt.edp.dsc_disable) 1390 return false; 1391 1392 if (!drm_dp_sink_supports_dsc(connector->dp.dsc_dpcd)) 1393 return false; 1394 1395 return true; 1396 } 1397 1398 static enum drm_mode_status 1399 intel_dp_mode_valid(struct drm_connector *_connector, 1400 const struct drm_display_mode *mode) 1401 { 1402 struct intel_display *display = to_intel_display(_connector->dev); 1403 struct intel_connector *connector = to_intel_connector(_connector); 1404 struct intel_dp *intel_dp = intel_attached_dp(connector); 1405 enum intel_output_format sink_format, output_format; 1406 const struct drm_display_mode *fixed_mode; 1407 int target_clock = mode->clock; 1408 int max_rate, mode_rate, max_lanes, max_link_clock; 1409 int max_dotclk = display->cdclk.max_dotclk_freq; 1410 u16 dsc_max_compressed_bpp = 0; 1411 u8 dsc_slice_count = 0; 1412 enum drm_mode_status status; 1413 
bool dsc = false; 1414 int num_joined_pipes; 1415 int link_bpp_x16; 1416 1417 status = intel_cpu_transcoder_mode_valid(display, mode); 1418 if (status != MODE_OK) 1419 return status; 1420 1421 if (mode->flags & DRM_MODE_FLAG_DBLCLK) 1422 return MODE_H_ILLEGAL; 1423 1424 if (mode->clock < 10000) 1425 return MODE_CLOCK_LOW; 1426 1427 fixed_mode = intel_panel_fixed_mode(connector, mode); 1428 if (intel_dp_is_edp(intel_dp) && fixed_mode) { 1429 status = intel_panel_mode_valid(connector, mode); 1430 if (status != MODE_OK) 1431 return status; 1432 1433 target_clock = fixed_mode->clock; 1434 } 1435 1436 num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector, 1437 mode->hdisplay, target_clock); 1438 max_dotclk *= num_joined_pipes; 1439 1440 sink_format = intel_dp_sink_format(connector, mode); 1441 output_format = intel_dp_output_format(connector, sink_format); 1442 1443 status = intel_pfit_mode_valid(display, mode, output_format, num_joined_pipes); 1444 if (status != MODE_OK) 1445 return status; 1446 1447 if (target_clock > max_dotclk) 1448 return MODE_CLOCK_HIGH; 1449 1450 if (intel_dp_hdisplay_bad(display, mode->hdisplay)) 1451 return MODE_H_ILLEGAL; 1452 1453 max_link_clock = intel_dp_max_link_rate(intel_dp); 1454 max_lanes = intel_dp_max_lane_count(intel_dp); 1455 1456 max_rate = intel_dp_max_link_data_rate(intel_dp, max_link_clock, max_lanes); 1457 1458 link_bpp_x16 = intel_dp_mode_min_link_bpp_x16(connector, mode); 1459 mode_rate = intel_dp_link_required(max_link_clock, max_lanes, 1460 target_clock, mode->hdisplay, 1461 link_bpp_x16, 0); 1462 1463 if (intel_dp_has_dsc(connector)) { 1464 int pipe_bpp; 1465 1466 /* 1467 * TBD pass the connector BPC, 1468 * for now U8_MAX so that max BPC on that platform would be picked 1469 */ 1470 pipe_bpp = intel_dp_dsc_compute_max_bpp(connector, U8_MAX); 1471 1472 /* 1473 * Output bpp is stored in 6.4 format so right shift by 4 to get the 1474 * integer value since we support only integer values of bpp. 
1475 */ 1476 if (intel_dp_is_edp(intel_dp)) { 1477 dsc_max_compressed_bpp = 1478 drm_edp_dsc_sink_output_bpp(connector->dp.dsc_dpcd) >> 4; 1479 dsc_slice_count = 1480 drm_dp_dsc_sink_max_slice_count(connector->dp.dsc_dpcd, 1481 true); 1482 dsc = dsc_max_compressed_bpp && dsc_slice_count; 1483 } else if (drm_dp_sink_supports_fec(connector->dp.fec_capability)) { 1484 unsigned long bw_overhead_flags = 0; 1485 1486 if (!drm_dp_is_uhbr_rate(max_link_clock)) 1487 bw_overhead_flags |= DRM_DP_BW_OVERHEAD_FEC; 1488 1489 dsc = intel_dp_mode_valid_with_dsc(connector, 1490 max_link_clock, max_lanes, 1491 target_clock, mode->hdisplay, 1492 num_joined_pipes, 1493 output_format, pipe_bpp, 1494 bw_overhead_flags); 1495 } 1496 } 1497 1498 if (intel_dp_joiner_needs_dsc(display, num_joined_pipes) && !dsc) 1499 return MODE_CLOCK_HIGH; 1500 1501 if (mode_rate > max_rate && !dsc) 1502 return MODE_CLOCK_HIGH; 1503 1504 status = intel_dp_mode_valid_downstream(connector, mode, target_clock); 1505 if (status != MODE_OK) 1506 return status; 1507 1508 return intel_mode_valid_max_plane_size(display, mode, num_joined_pipes); 1509 } 1510 1511 bool intel_dp_source_supports_tps3(struct intel_display *display) 1512 { 1513 return DISPLAY_VER(display) >= 9 || 1514 display->platform.broadwell || display->platform.haswell; 1515 } 1516 1517 bool intel_dp_source_supports_tps4(struct intel_display *display) 1518 { 1519 return DISPLAY_VER(display) >= 10; 1520 } 1521 1522 static void seq_buf_print_array(struct seq_buf *s, const int *array, int nelem) 1523 { 1524 int i; 1525 1526 for (i = 0; i < nelem; i++) 1527 seq_buf_printf(s, "%s%d", i ? ", " : "", array[i]); 1528 } 1529 1530 static void intel_dp_print_rates(struct intel_dp *intel_dp) 1531 { 1532 struct intel_display *display = to_intel_display(intel_dp); 1533 DECLARE_SEQ_BUF(s, 128); /* FIXME: too big for stack? 
*/ 1534 1535 if (!drm_debug_enabled(DRM_UT_KMS)) 1536 return; 1537 1538 seq_buf_print_array(&s, intel_dp->source_rates, intel_dp->num_source_rates); 1539 drm_dbg_kms(display->drm, "source rates: %s\n", seq_buf_str(&s)); 1540 1541 seq_buf_clear(&s); 1542 seq_buf_print_array(&s, intel_dp->sink_rates, intel_dp->num_sink_rates); 1543 drm_dbg_kms(display->drm, "sink rates: %s\n", seq_buf_str(&s)); 1544 1545 seq_buf_clear(&s); 1546 seq_buf_print_array(&s, intel_dp->common_rates, intel_dp->num_common_rates); 1547 drm_dbg_kms(display->drm, "common rates: %s\n", seq_buf_str(&s)); 1548 } 1549 1550 static int forced_link_rate(struct intel_dp *intel_dp) 1551 { 1552 int len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->link.force_rate); 1553 1554 if (len == 0) 1555 return intel_dp_common_rate(intel_dp, 0); 1556 1557 return intel_dp_common_rate(intel_dp, len - 1); 1558 } 1559 1560 int 1561 intel_dp_max_link_rate(struct intel_dp *intel_dp) 1562 { 1563 int len; 1564 1565 if (intel_dp->link.force_rate) 1566 return forced_link_rate(intel_dp); 1567 1568 len = intel_dp_common_len_rate_limit(intel_dp, intel_dp->link.max_rate); 1569 1570 return intel_dp_common_rate(intel_dp, len - 1); 1571 } 1572 1573 static int 1574 intel_dp_min_link_rate(struct intel_dp *intel_dp) 1575 { 1576 if (intel_dp->link.force_rate) 1577 return forced_link_rate(intel_dp); 1578 1579 return intel_dp_common_rate(intel_dp, 0); 1580 } 1581 1582 int intel_dp_rate_select(struct intel_dp *intel_dp, int rate) 1583 { 1584 struct intel_display *display = to_intel_display(intel_dp); 1585 int i = intel_dp_rate_index(intel_dp->sink_rates, 1586 intel_dp->num_sink_rates, rate); 1587 1588 if (drm_WARN_ON(display->drm, i < 0)) 1589 i = 0; 1590 1591 return i; 1592 } 1593 1594 void intel_dp_compute_rate(struct intel_dp *intel_dp, int port_clock, 1595 u8 *link_bw, u8 *rate_select) 1596 { 1597 struct intel_display *display = to_intel_display(intel_dp); 1598 1599 /* FIXME g4x can't generate an exact 2.7GHz with the 96MHz 
non-SSC refclk */ 1600 if (display->platform.g4x && port_clock == 268800) 1601 port_clock = 270000; 1602 1603 /* eDP 1.4 rate select method. */ 1604 if (intel_dp->use_rate_select) { 1605 *link_bw = 0; 1606 *rate_select = 1607 intel_dp_rate_select(intel_dp, port_clock); 1608 } else { 1609 *link_bw = drm_dp_link_rate_to_bw_code(port_clock); 1610 *rate_select = 0; 1611 } 1612 } 1613 1614 bool intel_dp_has_hdmi_sink(struct intel_dp *intel_dp) 1615 { 1616 struct intel_connector *connector = intel_dp->attached_connector; 1617 1618 return connector->base.display_info.is_hdmi; 1619 } 1620 1621 static bool intel_dp_source_supports_fec(struct intel_dp *intel_dp, 1622 const struct intel_crtc_state *pipe_config) 1623 { 1624 struct intel_display *display = to_intel_display(intel_dp); 1625 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 1626 1627 if (DISPLAY_VER(display) >= 12) 1628 return true; 1629 1630 if (DISPLAY_VER(display) == 11 && encoder->port != PORT_A && 1631 !intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DP_MST)) 1632 return true; 1633 1634 return false; 1635 } 1636 1637 bool intel_dp_supports_fec(struct intel_dp *intel_dp, 1638 const struct intel_connector *connector, 1639 const struct intel_crtc_state *pipe_config) 1640 { 1641 return intel_dp_source_supports_fec(intel_dp, pipe_config) && 1642 drm_dp_sink_supports_fec(connector->dp.fec_capability); 1643 } 1644 1645 bool intel_dp_supports_dsc(struct intel_dp *intel_dp, 1646 const struct intel_connector *connector, 1647 const struct intel_crtc_state *crtc_state) 1648 { 1649 if (!intel_dp_has_dsc(connector)) 1650 return false; 1651 1652 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP) && 1653 !intel_dp_supports_fec(intel_dp, connector, crtc_state)) 1654 return false; 1655 1656 return intel_dsc_source_support(crtc_state); 1657 } 1658 1659 static int intel_dp_hdmi_compute_bpc(struct intel_dp *intel_dp, 1660 const struct intel_crtc_state *crtc_state, 1661 int bpc, bool respect_downstream_limits) 1662 
{ 1663 int clock = crtc_state->hw.adjusted_mode.crtc_clock; 1664 1665 /* 1666 * Current bpc could already be below 8bpc due to 1667 * FDI bandwidth constraints or other limits. 1668 * HDMI minimum is 8bpc however. 1669 */ 1670 bpc = max(bpc, 8); 1671 1672 /* 1673 * We will never exceed downstream TMDS clock limits while 1674 * attempting deep color. If the user insists on forcing an 1675 * out of spec mode they will have to be satisfied with 8bpc. 1676 */ 1677 if (!respect_downstream_limits) 1678 bpc = 8; 1679 1680 for (; bpc >= 8; bpc -= 2) { 1681 if (intel_hdmi_bpc_possible(crtc_state, bpc, 1682 intel_dp_has_hdmi_sink(intel_dp)) && 1683 intel_dp_tmds_clock_valid(intel_dp, clock, bpc, crtc_state->sink_format, 1684 respect_downstream_limits) == MODE_OK) 1685 return bpc; 1686 } 1687 1688 return -EINVAL; 1689 } 1690 1691 static int intel_dp_max_bpp(struct intel_dp *intel_dp, 1692 const struct intel_crtc_state *crtc_state, 1693 bool respect_downstream_limits) 1694 { 1695 struct intel_display *display = to_intel_display(intel_dp); 1696 struct intel_connector *connector = intel_dp->attached_connector; 1697 int bpp, bpc; 1698 1699 bpc = crtc_state->pipe_bpp / 3; 1700 1701 if (intel_dp->dfp.max_bpc) 1702 bpc = min_t(int, bpc, intel_dp->dfp.max_bpc); 1703 1704 if (intel_dp->dfp.min_tmds_clock) { 1705 int max_hdmi_bpc; 1706 1707 max_hdmi_bpc = intel_dp_hdmi_compute_bpc(intel_dp, crtc_state, bpc, 1708 respect_downstream_limits); 1709 if (max_hdmi_bpc < 0) 1710 return 0; 1711 1712 bpc = min(bpc, max_hdmi_bpc); 1713 } 1714 1715 bpp = bpc * 3; 1716 if (intel_dp_is_edp(intel_dp)) { 1717 /* Get bpp from vbt only for panels that dont have bpp in edid */ 1718 if (connector->base.display_info.bpc == 0 && 1719 connector->panel.vbt.edp.bpp && 1720 connector->panel.vbt.edp.bpp < bpp) { 1721 drm_dbg_kms(display->drm, 1722 "clamping bpp for eDP panel to BIOS-provided %i\n", 1723 connector->panel.vbt.edp.bpp); 1724 bpp = connector->panel.vbt.edp.bpp; 1725 } 1726 } 1727 1728 return bpp; 
1729 } 1730 1731 static bool has_seamless_m_n(struct intel_connector *connector) 1732 { 1733 struct intel_display *display = to_intel_display(connector); 1734 1735 /* 1736 * Seamless M/N reprogramming only implemented 1737 * for BDW+ double buffered M/N registers so far. 1738 */ 1739 return HAS_DOUBLE_BUFFERED_M_N(display) && 1740 intel_panel_drrs_type(connector) == DRRS_TYPE_SEAMLESS; 1741 } 1742 1743 static int intel_dp_mode_clock(const struct intel_crtc_state *crtc_state, 1744 const struct drm_connector_state *conn_state) 1745 { 1746 struct intel_connector *connector = to_intel_connector(conn_state->connector); 1747 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode; 1748 1749 /* FIXME a bit of a mess wrt clock vs. crtc_clock */ 1750 if (has_seamless_m_n(connector)) 1751 return intel_panel_highest_mode(connector, adjusted_mode)->clock; 1752 else 1753 return adjusted_mode->crtc_clock; 1754 } 1755 1756 /* Optimize link config in order: max bpp, min clock, min lanes */ 1757 static int 1758 intel_dp_compute_link_config_wide(struct intel_dp *intel_dp, 1759 struct intel_crtc_state *pipe_config, 1760 const struct drm_connector_state *conn_state, 1761 const struct link_config_limits *limits) 1762 { 1763 int bpp, i, lane_count, clock = intel_dp_mode_clock(pipe_config, conn_state); 1764 int link_rate, link_avail; 1765 1766 for (bpp = fxp_q4_to_int(limits->link.max_bpp_x16); 1767 bpp >= fxp_q4_to_int(limits->link.min_bpp_x16); 1768 bpp -= 2 * 3) { 1769 int link_bpp_x16 = 1770 intel_dp_output_format_link_bpp_x16(pipe_config->output_format, bpp); 1771 1772 for (i = 0; i < intel_dp->num_common_rates; i++) { 1773 link_rate = intel_dp_common_rate(intel_dp, i); 1774 if (link_rate < limits->min_rate || 1775 link_rate > limits->max_rate) 1776 continue; 1777 1778 for (lane_count = limits->min_lane_count; 1779 lane_count <= limits->max_lane_count; 1780 lane_count <<= 1) { 1781 const struct drm_display_mode *adjusted_mode = 1782 
&pipe_config->hw.adjusted_mode; 1783 int mode_rate = 1784 intel_dp_link_required(link_rate, lane_count, 1785 clock, adjusted_mode->hdisplay, 1786 link_bpp_x16, 0); 1787 1788 link_avail = intel_dp_max_link_data_rate(intel_dp, 1789 link_rate, 1790 lane_count); 1791 1792 if (mode_rate <= link_avail) { 1793 pipe_config->lane_count = lane_count; 1794 pipe_config->pipe_bpp = bpp; 1795 pipe_config->port_clock = link_rate; 1796 1797 return 0; 1798 } 1799 } 1800 } 1801 } 1802 1803 return -EINVAL; 1804 } 1805 1806 int intel_dp_dsc_max_src_input_bpc(struct intel_display *display) 1807 { 1808 /* Max DSC Input BPC for ICL is 10 and for TGL+ is 12 */ 1809 if (DISPLAY_VER(display) >= 12) 1810 return 12; 1811 if (DISPLAY_VER(display) == 11) 1812 return 10; 1813 1814 return intel_dp_dsc_min_src_input_bpc(); 1815 } 1816 1817 static int align_min_sink_dsc_input_bpp(const struct intel_connector *connector, 1818 int min_pipe_bpp) 1819 { 1820 u8 dsc_bpc[3]; 1821 int num_bpc; 1822 int i; 1823 1824 num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd, 1825 dsc_bpc); 1826 for (i = num_bpc - 1; i >= 0; i--) { 1827 if (dsc_bpc[i] * 3 >= min_pipe_bpp) 1828 return dsc_bpc[i] * 3; 1829 } 1830 1831 return 0; 1832 } 1833 1834 static int align_max_sink_dsc_input_bpp(const struct intel_connector *connector, 1835 int max_pipe_bpp) 1836 { 1837 u8 dsc_bpc[3]; 1838 int num_bpc; 1839 int i; 1840 1841 num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd, 1842 dsc_bpc); 1843 for (i = 0; i < num_bpc; i++) { 1844 if (dsc_bpc[i] * 3 <= max_pipe_bpp) 1845 return dsc_bpc[i] * 3; 1846 } 1847 1848 return 0; 1849 } 1850 1851 int intel_dp_dsc_compute_max_bpp(const struct intel_connector *connector, 1852 u8 max_req_bpc) 1853 { 1854 struct intel_display *display = to_intel_display(connector); 1855 int dsc_max_bpc; 1856 1857 dsc_max_bpc = intel_dp_dsc_max_src_input_bpc(display); 1858 1859 if (!dsc_max_bpc) 1860 return dsc_max_bpc; 1861 1862 dsc_max_bpc = min(dsc_max_bpc, 
max_req_bpc); 1863 1864 return align_max_sink_dsc_input_bpp(connector, dsc_max_bpc * 3); 1865 } 1866 1867 static int intel_dp_source_dsc_version_minor(struct intel_display *display) 1868 { 1869 return DISPLAY_VER(display) >= 14 ? 2 : 1; 1870 } 1871 1872 static int intel_dp_sink_dsc_version_minor(const u8 dsc_dpcd[DP_DSC_RECEIVER_CAP_SIZE]) 1873 { 1874 return (dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] & DP_DSC_MINOR_MASK) >> 1875 DP_DSC_MINOR_SHIFT; 1876 } 1877 1878 static int intel_dp_get_slice_height(int vactive) 1879 { 1880 int slice_height; 1881 1882 /* 1883 * VDSC 1.2a spec in Section 3.8 Options for Slices implies that 108 1884 * lines is an optimal slice height, but any size can be used as long as 1885 * vertical active integer multiple and maximum vertical slice count 1886 * requirements are met. 1887 */ 1888 for (slice_height = 108; slice_height <= vactive; slice_height += 2) 1889 if (vactive % slice_height == 0) 1890 return slice_height; 1891 1892 /* 1893 * Highly unlikely we reach here as most of the resolutions will end up 1894 * finding appropriate slice_height in above loop but returning 1895 * slice_height as 2 here as it should work with all resolutions. 1896 */ 1897 return 2; 1898 } 1899 1900 static int intel_dp_dsc_compute_params(const struct intel_connector *connector, 1901 struct intel_crtc_state *crtc_state) 1902 { 1903 struct intel_display *display = to_intel_display(connector); 1904 struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config; 1905 int ret; 1906 1907 /* 1908 * RC_MODEL_SIZE is currently a constant across all configurations. 1909 * 1910 * FIXME: Look into using sink defined DPCD DP_DSC_RC_BUF_BLK_SIZE and 1911 * DP_DSC_RC_BUF_SIZE for this. 
1912 */ 1913 vdsc_cfg->rc_model_size = DSC_RC_MODEL_SIZE_CONST; 1914 vdsc_cfg->pic_height = crtc_state->hw.adjusted_mode.crtc_vdisplay; 1915 1916 vdsc_cfg->slice_height = intel_dp_get_slice_height(vdsc_cfg->pic_height); 1917 1918 ret = intel_dsc_compute_params(crtc_state); 1919 if (ret) 1920 return ret; 1921 1922 vdsc_cfg->dsc_version_major = 1923 (connector->dp.dsc_dpcd[DP_DSC_REV - DP_DSC_SUPPORT] & 1924 DP_DSC_MAJOR_MASK) >> DP_DSC_MAJOR_SHIFT; 1925 vdsc_cfg->dsc_version_minor = 1926 min(intel_dp_source_dsc_version_minor(display), 1927 intel_dp_sink_dsc_version_minor(connector->dp.dsc_dpcd)); 1928 if (vdsc_cfg->convert_rgb) 1929 vdsc_cfg->convert_rgb = 1930 connector->dp.dsc_dpcd[DP_DSC_DEC_COLOR_FORMAT_CAP - DP_DSC_SUPPORT] & 1931 DP_DSC_RGB; 1932 1933 vdsc_cfg->line_buf_depth = min(INTEL_DP_DSC_MAX_LINE_BUF_DEPTH, 1934 drm_dp_dsc_sink_line_buf_depth(connector->dp.dsc_dpcd)); 1935 if (!vdsc_cfg->line_buf_depth) { 1936 drm_dbg_kms(display->drm, 1937 "DSC Sink Line Buffer Depth invalid\n"); 1938 return -EINVAL; 1939 } 1940 1941 vdsc_cfg->block_pred_enable = 1942 connector->dp.dsc_dpcd[DP_DSC_BLK_PREDICTION_SUPPORT - DP_DSC_SUPPORT] & 1943 DP_DSC_BLK_PREDICTION_IS_SUPPORTED; 1944 1945 return drm_dsc_compute_rc_parameters(vdsc_cfg); 1946 } 1947 1948 static bool intel_dp_dsc_supports_format(const struct intel_connector *connector, 1949 enum intel_output_format output_format) 1950 { 1951 struct intel_display *display = to_intel_display(connector); 1952 u8 sink_dsc_format; 1953 1954 switch (output_format) { 1955 case INTEL_OUTPUT_FORMAT_RGB: 1956 sink_dsc_format = DP_DSC_RGB; 1957 break; 1958 case INTEL_OUTPUT_FORMAT_YCBCR444: 1959 sink_dsc_format = DP_DSC_YCbCr444; 1960 break; 1961 case INTEL_OUTPUT_FORMAT_YCBCR420: 1962 if (min(intel_dp_source_dsc_version_minor(display), 1963 intel_dp_sink_dsc_version_minor(connector->dp.dsc_dpcd)) < 2) 1964 return false; 1965 sink_dsc_format = DP_DSC_YCbCr420_Native; 1966 break; 1967 default: 1968 return false; 1969 } 1970 1971 
return drm_dp_dsc_sink_supports_format(connector->dp.dsc_dpcd, sink_dsc_format); 1972 } 1973 1974 static bool is_bw_sufficient_for_dsc_config(struct intel_dp *intel_dp, 1975 int link_clock, int lane_count, 1976 int mode_clock, int mode_hdisplay, 1977 int dsc_slice_count, int link_bpp_x16, 1978 unsigned long bw_overhead_flags) 1979 { 1980 int available_bw; 1981 int required_bw; 1982 1983 available_bw = intel_dp_max_link_data_rate(intel_dp, link_clock, lane_count); 1984 required_bw = intel_dp_link_required(link_clock, lane_count, 1985 mode_clock, mode_hdisplay, 1986 link_bpp_x16, bw_overhead_flags); 1987 1988 return available_bw >= required_bw; 1989 } 1990 1991 static int dsc_compute_link_config(struct intel_dp *intel_dp, 1992 struct intel_crtc_state *pipe_config, 1993 struct drm_connector_state *conn_state, 1994 const struct link_config_limits *limits, 1995 int dsc_bpp_x16) 1996 { 1997 const struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode; 1998 int link_rate, lane_count; 1999 int i; 2000 2001 for (i = 0; i < intel_dp->num_common_rates; i++) { 2002 link_rate = intel_dp_common_rate(intel_dp, i); 2003 if (link_rate < limits->min_rate || link_rate > limits->max_rate) 2004 continue; 2005 2006 for (lane_count = limits->min_lane_count; 2007 lane_count <= limits->max_lane_count; 2008 lane_count <<= 1) { 2009 2010 /* 2011 * FIXME: intel_dp_mtp_tu_compute_config() requires 2012 * ->lane_count and ->port_clock set before we know 2013 * they'll work. If we end up failing altogether, 2014 * they'll remain in crtc state. This shouldn't matter, 2015 * as we'd then bail out from compute config, but it's 2016 * just ugly. 
2017 */ 2018 pipe_config->lane_count = lane_count; 2019 pipe_config->port_clock = link_rate; 2020 2021 if (drm_dp_is_uhbr_rate(link_rate)) { 2022 int ret; 2023 2024 ret = intel_dp_mtp_tu_compute_config(intel_dp, 2025 pipe_config, 2026 conn_state, 2027 dsc_bpp_x16, 2028 dsc_bpp_x16, 2029 0, true); 2030 if (ret) 2031 continue; 2032 } else { 2033 unsigned long bw_overhead_flags = 2034 pipe_config->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0; 2035 2036 if (!is_bw_sufficient_for_dsc_config(intel_dp, 2037 link_rate, lane_count, 2038 adjusted_mode->crtc_clock, 2039 adjusted_mode->hdisplay, 2040 pipe_config->dsc.slice_count, 2041 dsc_bpp_x16, 2042 bw_overhead_flags)) 2043 continue; 2044 } 2045 2046 return 0; 2047 } 2048 } 2049 2050 return -EINVAL; 2051 } 2052 2053 static 2054 u16 intel_dp_dsc_max_sink_compressed_bppx16(const struct intel_connector *connector, 2055 enum intel_output_format output_format, 2056 int bpc) 2057 { 2058 u16 max_bppx16 = drm_edp_dsc_sink_output_bpp(connector->dp.dsc_dpcd); 2059 2060 if (max_bppx16) 2061 return max_bppx16; 2062 /* 2063 * If support not given in DPCD 67h, 68h use the Maximum Allowed bit rate 2064 * values as given in spec Table 2-157 DP v2.0 2065 */ 2066 switch (output_format) { 2067 case INTEL_OUTPUT_FORMAT_RGB: 2068 case INTEL_OUTPUT_FORMAT_YCBCR444: 2069 return (3 * bpc) << 4; 2070 case INTEL_OUTPUT_FORMAT_YCBCR420: 2071 return (3 * (bpc / 2)) << 4; 2072 default: 2073 MISSING_CASE(output_format); 2074 break; 2075 } 2076 2077 return 0; 2078 } 2079 2080 static int intel_dp_dsc_sink_min_compressed_bpp(enum intel_output_format output_format) 2081 { 2082 /* From Mandatory bit rate range Support Table 2-157 (DP v2.0) */ 2083 switch (output_format) { 2084 case INTEL_OUTPUT_FORMAT_RGB: 2085 case INTEL_OUTPUT_FORMAT_YCBCR444: 2086 return 8; 2087 case INTEL_OUTPUT_FORMAT_YCBCR420: 2088 return 6; 2089 default: 2090 MISSING_CASE(output_format); 2091 break; 2092 } 2093 2094 return 0; 2095 } 2096 2097 static int 
intel_dp_dsc_sink_max_compressed_bpp(const struct intel_connector *connector,
				     enum intel_output_format output_format,
				     int bpc)
{
	/* Integer part of the U6.4 sink maximum */
	return intel_dp_dsc_max_sink_compressed_bppx16(connector,
						       output_format, bpc) >> 4;
}

int intel_dp_dsc_min_src_compressed_bpp(void)
{
	/* Min Compressed bpp supported by source is 8 */
	return 8;
}

/* Maximum compressed bpp the source (platform) can produce. */
static int dsc_src_max_compressed_bpp(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Forcing DSC and using the platform's max compressed bpp is seen to cause
	 * underruns. Since DSC isn't needed in these cases, limit the
	 * max compressed bpp to 18, which is a safe value across platforms with different
	 * pipe bpps.
	 */
	if (intel_dp->force_dsc_en)
		return 18;

	/*
	 * Max Compressed bpp for Gen 13+ is 27bpp.
	 * For earlier platform is 23bpp. (Bspec:49259).
	 */
	if (DISPLAY_VER(display) < 13)
		return 23;
	else
		return 27;
}

/*
 * Compressed bpp step size in U6.4 fixed point, derived from the sink's
 * reported bpp increment. Fractional steps are only used on display 14+, and
 * on MST only when forced via force_bpp_x16/force_dsc_fractional_bpp_en.
 *
 * Note: for pre-13 display you still need to check the validity of each step.
 */
int intel_dp_dsc_bpp_step_x16(const struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	u8 incr = drm_dp_dsc_sink_bpp_incr(connector->dp.dsc_dpcd);

	if (DISPLAY_VER(display) < 14 || !incr)
		return fxp_q4_from_int(1);

	if (connector->mst.dp &&
	    !connector->link.force_bpp_x16 && !connector->mst.dp->force_dsc_fractional_bpp_en)
		return fxp_q4_from_int(1);

	/* fxp q4 */
	return fxp_q4_from_int(1) / incr;
}

/*
 * Note: for bpp_x16 to be valid it must be also within the source/sink's
 * min..max bpp capability range.
 */
bool intel_dp_dsc_valid_compressed_bpp(struct intel_dp *intel_dp, int bpp_x16)
{
	struct intel_display *display = to_intel_display(intel_dp);

	if (DISPLAY_VER(display) >= 13) {
		/* When forcing fractional bpp, reject whole-integer values */
		if (intel_dp->force_dsc_fractional_bpp_en && !fxp_q4_to_frac(bpp_x16))
			return false;

		return true;
	}

	/* Pre-13: only integer bpps aligned to the VESA table are valid */
	if (fxp_q4_to_frac(bpp_x16))
		return false;

	return align_max_vesa_compressed_bpp_x16(bpp_x16) == bpp_x16;
}

/* Round min_bpp_x16 up to the platform's valid compressed bpp granularity. */
static int align_min_compressed_bpp_x16(const struct intel_connector *connector, int min_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);

	if (DISPLAY_VER(display) >= 13) {
		int bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

		drm_WARN_ON(display->drm, !is_power_of_2(bpp_step_x16));

		return round_up(min_bpp_x16, bpp_step_x16);
	} else {
		return align_min_vesa_compressed_bpp_x16(min_bpp_x16);
	}
}

/*
 * Round max_bpp_x16 down to the platform's valid compressed bpp granularity,
 * first capping it below the uncompressed link bpp (compression must actually
 * reduce the bpp by at least one step).
 */
static int align_max_compressed_bpp_x16(const struct intel_connector *connector,
					enum intel_output_format output_format,
					int pipe_bpp, int max_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);
	int link_bpp_x16 = intel_dp_output_format_link_bpp_x16(output_format, pipe_bpp);
	int bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

	max_bpp_x16 = min(max_bpp_x16, link_bpp_x16 - bpp_step_x16);

	if (DISPLAY_VER(display) >= 13) {
		drm_WARN_ON(display->drm, !is_power_of_2(bpp_step_x16));

		return round_down(max_bpp_x16, bpp_step_x16);
	} else {
		return align_max_vesa_compressed_bpp_x16(max_bpp_x16);
	}
}

/*
 * Find the max compressed BPP we can find a link configuration for. The BPPs to
 * try depend on the source (platform) and sink.
 */
static int dsc_compute_compressed_bpp(struct intel_dp *intel_dp,
				      struct intel_crtc_state *pipe_config,
				      struct drm_connector_state *conn_state,
				      const struct link_config_limits *limits,
				      int pipe_bpp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	const struct intel_connector *connector = to_intel_connector(conn_state->connector);
	int min_bpp_x16, max_bpp_x16, bpp_step_x16;
	int bpp_x16;
	int ret;

	min_bpp_x16 = limits->link.min_bpp_x16;
	max_bpp_x16 = limits->link.max_bpp_x16;
	bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

	max_bpp_x16 = align_max_compressed_bpp_x16(connector, pipe_config->output_format,
						   pipe_bpp, max_bpp_x16);
	/* eDP: take the max link parameters and max compressed bpp directly */
	if (intel_dp_is_edp(intel_dp)) {
		pipe_config->port_clock = limits->max_rate;
		pipe_config->lane_count = limits->max_lane_count;

		pipe_config->dsc.compressed_bpp_x16 = max_bpp_x16;

		return 0;
	}

	/* Try bpps from max downwards, taking the first with a working link config */
	for (bpp_x16 = max_bpp_x16; bpp_x16 >= min_bpp_x16; bpp_x16 -= bpp_step_x16) {
		if (!intel_dp_dsc_valid_compressed_bpp(intel_dp, bpp_x16))
			continue;

		ret = dsc_compute_link_config(intel_dp,
					      pipe_config,
					      conn_state,
					      limits,
					      bpp_x16);
		if (ret == 0) {
			pipe_config->dsc.compressed_bpp_x16 = bpp_x16;
			if (intel_dp->force_dsc_fractional_bpp_en &&
			    fxp_q4_to_frac(bpp_x16))
				drm_dbg_kms(display->drm,
					    "Forcing DSC fractional bpp\n");

			return 0;
		}
	}

	return -EINVAL;
}

int intel_dp_dsc_min_src_input_bpc(void)
{
	/* Min DSC Input BPC for ICL+ is 8 */
	return 8;
}

/* Check that pipe_bpp is within the pipe bpp limits. */
static
bool is_dsc_pipe_bpp_sufficient(const struct link_config_limits *limits,
				int pipe_bpp)
{
	return pipe_bpp >= limits->pipe.min_bpp &&
	       pipe_bpp <= limits->pipe.max_bpp;
}

/*
 * Return the debugfs-forced DSC input bpp if set and within limits,
 * otherwise 0 (no forcing).
 */
static
int intel_dp_force_dsc_pipe_bpp(struct intel_dp *intel_dp,
				const struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	int forced_bpp;

	if (!intel_dp->force_dsc_bpc)
		return 0;

	forced_bpp = intel_dp->force_dsc_bpc * 3;

	if (is_dsc_pipe_bpp_sufficient(limits, forced_bpp)) {
		drm_dbg_kms(display->drm, "Input DSC BPC forced to %d\n",
			    intel_dp->force_dsc_bpc);
		return forced_bpp;
	}

	drm_dbg_kms(display->drm,
		    "Cannot force DSC BPC:%d, due to DSC BPC limits\n",
		    intel_dp->force_dsc_bpc);

	return 0;
}

/*
 * Pick the DSC input (pipe) bpp - forced value if set, otherwise the pipe
 * maximum - and compute the compressed bpp and link config for it. Sets
 * ->pipe_bpp on success.
 */
static int intel_dp_dsc_compute_pipe_bpp(struct intel_dp *intel_dp,
					 struct intel_crtc_state *pipe_config,
					 struct drm_connector_state *conn_state,
					 const struct link_config_limits *limits)
{
	int forced_bpp, pipe_bpp;
	int ret;

	forced_bpp = intel_dp_force_dsc_pipe_bpp(intel_dp, limits);
	if (forced_bpp)
		pipe_bpp = forced_bpp;
	else
		pipe_bpp = limits->pipe.max_bpp;

	ret = dsc_compute_compressed_bpp(intel_dp, pipe_config, conn_state,
					 limits, pipe_bpp);
	if (ret)
		return -EINVAL;

	pipe_config->pipe_bpp = pipe_bpp;

	return 0;
}

/*
 * Return whether FEC must be enabled for 8b10b SST or MST links. On 128b132b
 * links FEC is always enabled implicitly by the HW, so this function returns
 * false for that case.
 */
bool intel_dp_needs_8b10b_fec(const struct intel_crtc_state *crtc_state,
			      bool dsc_enabled_on_crtc)
{
	if (intel_dp_is_uhbr(crtc_state))
		return false;

	/*
	 * Though eDP v1.5 supports FEC with DSC, unlike DP, it is optional.
	 * Since, FEC is a bandwidth overhead, continue to not enable it for
	 * eDP. Until, there is a good reason to do so.
2340 */ 2341 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) 2342 return false; 2343 2344 return dsc_enabled_on_crtc || intel_dsc_enabled_on_link(crtc_state); 2345 } 2346 2347 int intel_dp_dsc_compute_config(struct intel_dp *intel_dp, 2348 struct intel_crtc_state *pipe_config, 2349 struct drm_connector_state *conn_state, 2350 const struct link_config_limits *limits, 2351 int timeslots) 2352 { 2353 struct intel_display *display = to_intel_display(intel_dp); 2354 const struct intel_connector *connector = 2355 to_intel_connector(conn_state->connector); 2356 const struct drm_display_mode *adjusted_mode = 2357 &pipe_config->hw.adjusted_mode; 2358 int num_joined_pipes = intel_crtc_num_joined_pipes(pipe_config); 2359 bool is_mst = intel_crtc_has_type(pipe_config, INTEL_OUTPUT_DP_MST); 2360 int ret; 2361 2362 /* 2363 * FIXME: set the FEC enabled state once pipe_config->port_clock is 2364 * already known, so the UHBR/non-UHBR mode can be determined. 2365 */ 2366 pipe_config->fec_enable = intel_dp_needs_8b10b_fec(pipe_config, true); 2367 2368 if (!intel_dp_dsc_supports_format(connector, pipe_config->output_format)) 2369 return -EINVAL; 2370 2371 /* 2372 * Link parameters, pipe bpp and compressed bpp have already been 2373 * figured out for DP MST DSC. 
2374 */ 2375 if (!is_mst) { 2376 ret = intel_dp_dsc_compute_pipe_bpp(intel_dp, pipe_config, 2377 conn_state, limits); 2378 if (ret) { 2379 drm_dbg_kms(display->drm, 2380 "No Valid pipe bpp for given mode ret = %d\n", ret); 2381 return ret; 2382 } 2383 } 2384 2385 /* Calculate Slice count */ 2386 if (intel_dp_is_edp(intel_dp)) { 2387 pipe_config->dsc.slice_count = 2388 drm_dp_dsc_sink_max_slice_count(connector->dp.dsc_dpcd, 2389 true); 2390 if (!pipe_config->dsc.slice_count) { 2391 drm_dbg_kms(display->drm, 2392 "Unsupported Slice Count %d\n", 2393 pipe_config->dsc.slice_count); 2394 return -EINVAL; 2395 } 2396 } else { 2397 u8 dsc_dp_slice_count; 2398 2399 dsc_dp_slice_count = 2400 intel_dp_dsc_get_slice_count(connector, 2401 adjusted_mode->crtc_clock, 2402 adjusted_mode->crtc_hdisplay, 2403 num_joined_pipes); 2404 if (!dsc_dp_slice_count) { 2405 drm_dbg_kms(display->drm, 2406 "Compressed Slice Count not supported\n"); 2407 return -EINVAL; 2408 } 2409 2410 pipe_config->dsc.slice_count = dsc_dp_slice_count; 2411 } 2412 /* 2413 * VDSC engine operates at 1 Pixel per clock, so if peak pixel rate 2414 * is greater than the maximum Cdclock and if slice count is even 2415 * then we need to use 2 VDSC instances. 2416 * In case of Ultrajoiner along with 12 slices we need to use 3 2417 * VDSC instances. 
2418 */ 2419 if (pipe_config->joiner_pipes && num_joined_pipes == 4 && 2420 pipe_config->dsc.slice_count == 12) 2421 pipe_config->dsc.num_streams = 3; 2422 else if (pipe_config->joiner_pipes || pipe_config->dsc.slice_count > 1) 2423 pipe_config->dsc.num_streams = 2; 2424 else 2425 pipe_config->dsc.num_streams = 1; 2426 2427 ret = intel_dp_dsc_compute_params(connector, pipe_config); 2428 if (ret < 0) { 2429 drm_dbg_kms(display->drm, 2430 "Cannot compute valid DSC parameters for Input Bpp = %d" 2431 "Compressed BPP = " FXP_Q4_FMT "\n", 2432 pipe_config->pipe_bpp, 2433 FXP_Q4_ARGS(pipe_config->dsc.compressed_bpp_x16)); 2434 return ret; 2435 } 2436 2437 intel_dsc_enable_on_crtc(pipe_config); 2438 2439 drm_dbg_kms(display->drm, "DP DSC computed with Input Bpp = %d " 2440 "Compressed Bpp = " FXP_Q4_FMT " Slice Count = %d\n", 2441 pipe_config->pipe_bpp, 2442 FXP_Q4_ARGS(pipe_config->dsc.compressed_bpp_x16), 2443 pipe_config->dsc.slice_count); 2444 2445 return 0; 2446 } 2447 2448 static int 2449 dsc_throughput_quirk_max_bpp_x16(const struct intel_connector *connector, 2450 int mode_clock) 2451 { 2452 if (!connector->dp.dsc_throughput_quirk) 2453 return INT_MAX; 2454 2455 /* 2456 * Synaptics Panamera branch devices have a problem decompressing a 2457 * stream with a compressed link-bpp higher than 12, if the pixel 2458 * clock is higher than ~50 % of the maximum overall throughput 2459 * reported by the branch device. Work around this by limiting the 2460 * maximum link bpp for such pixel clocks. 2461 * 2462 * TODO: Use the throughput value specific to the actual RGB/YUV 2463 * format of the output, after determining the pixel clock limit for 2464 * YUV modes. For now use the smaller of the throughput values, which 2465 * may result in limiting the link-bpp value already at a lower than 2466 * required mode clock in case of native YUV422/420 output formats. 
	 * The RGB/YUV444 throughput value should be always either equal or
	 * smaller than the YUV422/420 value, but let's not depend on this
	 * assumption.
	 */
	if (mode_clock <
	    min(connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444,
		connector->dp.dsc_branch_caps.overall_throughput.yuv422_420) / 2)
		return INT_MAX;

	return fxp_q4_from_int(12);
}

/*
 * Minimum compressed bpp (U6.4) acceptable to both source and sink for the
 * given output format, aligned to the platform's bpp granularity.
 */
static int compute_min_compressed_bpp_x16(struct intel_connector *connector,
					  enum intel_output_format output_format)
{
	int dsc_src_min_bpp, dsc_sink_min_bpp, dsc_min_bpp;
	int min_bpp_x16;

	dsc_src_min_bpp = intel_dp_dsc_min_src_compressed_bpp();
	dsc_sink_min_bpp = intel_dp_dsc_sink_min_compressed_bpp(output_format);
	dsc_min_bpp = max(dsc_src_min_bpp, dsc_sink_min_bpp);

	min_bpp_x16 = fxp_q4_from_int(dsc_min_bpp);

	min_bpp_x16 = align_min_compressed_bpp_x16(connector, min_bpp_x16);

	return min_bpp_x16;
}

/*
 * Maximum compressed bpp (U6.4): the minimum of the source, sink, joiner and
 * branch-throughput-quirk caps, aligned to the platform's bpp granularity.
 */
static int compute_max_compressed_bpp_x16(struct intel_connector *connector,
					  int mode_clock, int mode_hdisplay,
					  int num_joined_pipes,
					  enum intel_output_format output_format,
					  int pipe_max_bpp, int max_link_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int dsc_src_max_bpp, dsc_sink_max_bpp, dsc_max_bpp;
	int throughput_max_bpp_x16;
	int joiner_max_bpp;

	dsc_src_max_bpp = dsc_src_max_compressed_bpp(intel_dp);
	joiner_max_bpp = get_max_compressed_bpp_with_joiner(display,
							    mode_clock,
							    mode_hdisplay,
							    num_joined_pipes);
	dsc_sink_max_bpp = intel_dp_dsc_sink_max_compressed_bpp(connector,
								output_format,
								pipe_max_bpp / 3);
	dsc_max_bpp = min(dsc_sink_max_bpp, dsc_src_max_bpp);
	dsc_max_bpp = min(dsc_max_bpp, joiner_max_bpp);

	max_link_bpp_x16 = min(max_link_bpp_x16, fxp_q4_from_int(dsc_max_bpp));

	throughput_max_bpp_x16 = dsc_throughput_quirk_max_bpp_x16(connector,
								  mode_clock);
	if (throughput_max_bpp_x16 < max_link_bpp_x16) {
		max_link_bpp_x16 = throughput_max_bpp_x16;

		drm_dbg_kms(display->drm,
			    "[CONNECTOR:%d:%s] Decreasing link max bpp to " FXP_Q4_FMT " due to DSC throughput quirk\n",
			    connector->base.base.id, connector->base.name,
			    FXP_Q4_ARGS(max_link_bpp_x16));
	}

	max_link_bpp_x16 = align_max_compressed_bpp_x16(connector, output_format,
							pipe_max_bpp, max_link_bpp_x16);

	return max_link_bpp_x16;
}

/*
 * Check whether the mode can be carried over the given link with DSC at the
 * minimum compressed bpp. Used for mode validation: if the minimum bpp
 * doesn't fit, no compressed bpp will.
 */
bool intel_dp_mode_valid_with_dsc(struct intel_connector *connector,
				  int link_clock, int lane_count,
				  int mode_clock, int mode_hdisplay,
				  int num_joined_pipes,
				  enum intel_output_format output_format,
				  int pipe_bpp, unsigned long bw_overhead_flags)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int min_bpp_x16 = compute_min_compressed_bpp_x16(connector, output_format);
	int max_bpp_x16 = compute_max_compressed_bpp_x16(connector,
							 mode_clock, mode_hdisplay,
							 num_joined_pipes,
							 output_format,
							 pipe_bpp, INT_MAX);
	int dsc_slice_count = intel_dp_dsc_get_slice_count(connector,
							   mode_clock,
							   mode_hdisplay,
							   num_joined_pipes);

	if (min_bpp_x16 <= 0 || min_bpp_x16 > max_bpp_x16)
		return false;

	if (dsc_slice_count == 0)
		return false;

	return is_bw_sufficient_for_dsc_config(intel_dp,
					       link_clock, lane_count,
					       mode_clock, mode_hdisplay,
					       dsc_slice_count, min_bpp_x16,
					       bw_overhead_flags);
}

/*
 * Calculate the output link min, max bpp values in limits based on the pipe bpp
 * range, crtc_state and dsc mode. Return true on success.
 */
static bool
intel_dp_compute_config_link_bpp_limits(struct intel_connector *connector,
					const struct intel_crtc_state *crtc_state,
					bool dsc,
					struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	const struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int max_link_bpp_x16;

	max_link_bpp_x16 = min(crtc_state->max_link_bpp_x16,
			       fxp_q4_from_int(limits->pipe.max_bpp));

	if (!dsc) {
		/* Uncompressed link bpp must be a multiple of 2*3 (full bpc steps) */
		max_link_bpp_x16 = rounddown(max_link_bpp_x16, fxp_q4_from_int(2 * 3));

		if (max_link_bpp_x16 < fxp_q4_from_int(limits->pipe.min_bpp))
			return false;

		limits->link.min_bpp_x16 = fxp_q4_from_int(limits->pipe.min_bpp);
	} else {
		limits->link.min_bpp_x16 =
			compute_min_compressed_bpp_x16(connector, crtc_state->output_format);

		max_link_bpp_x16 =
			compute_max_compressed_bpp_x16(connector,
						       adjusted_mode->crtc_clock,
						       adjusted_mode->hdisplay,
						       intel_crtc_num_joined_pipes(crtc_state),
						       crtc_state->output_format,
						       limits->pipe.max_bpp,
						       max_link_bpp_x16);
	}

	limits->link.max_bpp_x16 = max_link_bpp_x16;

	drm_dbg_kms(display->drm,
		    "[ENCODER:%d:%s][CRTC:%d:%s] DP link limits: pixel clock %d kHz DSC %s max lanes %d max rate %d max pipe_bpp %d min link_bpp " FXP_Q4_FMT " max link_bpp " FXP_Q4_FMT "\n",
		    encoder->base.base.id, encoder->base.name,
		    crtc->base.base.id, crtc->base.name,
		    adjusted_mode->crtc_clock,
		    str_on_off(dsc),
		    limits->max_lane_count,
		    limits->max_rate,
		    limits->pipe.max_bpp,
		    FXP_Q4_ARGS(limits->link.min_bpp_x16),
		    FXP_Q4_ARGS(limits->link.max_bpp_x16));

	if (limits->link.min_bpp_x16 <= 0 ||
	    limits->link.min_bpp_x16 > limits->link.max_bpp_x16)
		return false;

	return true;
}

/*
 * Clamp the pipe bpp limits to the source's and sink's DSC input bpc range.
 * Returns false if the resulting range is empty or invalid.
 */
static bool
intel_dp_dsc_compute_pipe_bpp_limits(struct intel_connector *connector,
				     struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(connector);
	const struct link_config_limits orig_limits = *limits;
	int dsc_min_bpc = intel_dp_dsc_min_src_input_bpc();
	int dsc_max_bpc = intel_dp_dsc_max_src_input_bpc(display);

	limits->pipe.min_bpp = max(limits->pipe.min_bpp, dsc_min_bpc * 3);
	limits->pipe.min_bpp = align_min_sink_dsc_input_bpp(connector, limits->pipe.min_bpp);

	limits->pipe.max_bpp = min(limits->pipe.max_bpp, dsc_max_bpc * 3);
	limits->pipe.max_bpp = align_max_sink_dsc_input_bpp(connector, limits->pipe.max_bpp);

	if (limits->pipe.min_bpp <= 0 ||
	    limits->pipe.min_bpp > limits->pipe.max_bpp) {
		drm_dbg_kms(display->drm,
			    "[CONNECTOR:%d:%s] Invalid DSC src/sink input BPP (src:%d-%d pipe:%d-%d sink-align:%d-%d)\n",
			    connector->base.base.id, connector->base.name,
			    dsc_min_bpc * 3, dsc_max_bpc * 3,
			    orig_limits.pipe.min_bpp, orig_limits.pipe.max_bpp,
			    limits->pipe.min_bpp, limits->pipe.max_bpp);

		return false;
	}

	return true;
}

/*
 * Fill in the link rate/lane count and pipe/link bpp limits used by the link
 * configuration computation. Returns false if no valid limit range exists.
 */
bool
intel_dp_compute_config_limits(struct intel_dp *intel_dp,
			       struct drm_connector_state *conn_state,
			       struct intel_crtc_state *crtc_state,
			       bool respect_downstream_limits,
			       bool dsc,
			       struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);

	limits->min_rate = intel_dp_min_link_rate(intel_dp);
	limits->max_rate = intel_dp_max_link_rate(intel_dp);

	/* Keep the range well-formed even if min > max */
	limits->min_rate = min(limits->min_rate, limits->max_rate);

	limits->min_lane_count = intel_dp_min_lane_count(intel_dp);
	limits->max_lane_count = intel_dp_max_lane_count(intel_dp);

	limits->pipe.min_bpp = intel_dp_min_bpp(crtc_state->output_format);
	if (is_mst) {
		/*
		 * FIXME: If all the streams can't fit into the link with their
		 * current pipe_bpp we should reduce pipe_bpp across the board
		 * until things start to fit. Until then we limit to <= 8bpc
		 * since that's what was hardcoded for all MST streams
		 * previously. This hack should be removed once we have the
		 * proper retry logic in place.
		 */
		limits->pipe.max_bpp = min(crtc_state->pipe_bpp, 24);
	} else {
		limits->pipe.max_bpp = intel_dp_max_bpp(intel_dp, crtc_state,
							respect_downstream_limits);
	}

	/* For HDR (non-DSC) require 30 bpp when the panel/platform allows it */
	if (!dsc && intel_dp_in_hdr_mode(conn_state)) {
		if (intel_dp_supports_dsc(intel_dp, connector, crtc_state) &&
		    limits->pipe.max_bpp >= 30)
			limits->pipe.min_bpp = max(limits->pipe.min_bpp, 30);
		else
			drm_dbg_kms(display->drm,
				    "[CONNECTOR:%d:%s] Can't force 30 bpp for HDR (pipe bpp: %d-%d DSC-support: %s)\n",
				    connector->base.base.id, connector->base.name,
				    limits->pipe.min_bpp, limits->pipe.max_bpp,
				    str_yes_no(intel_dp_supports_dsc(intel_dp, connector,
								     crtc_state)));
	}

	if (dsc && !intel_dp_dsc_compute_pipe_bpp_limits(connector, limits))
		return false;

	if (is_mst || intel_dp->use_max_params) {
		/*
		 * For MST we always configure max link bw - the spec doesn't
		 * seem to suggest we should do otherwise.
		 *
		 * Use the maximum clock and number of lanes the eDP panel
		 * advertizes being capable of in case the initial fast
		 * optimal params failed us. The panels are generally
		 * designed to support only a single clock and lane
		 * configuration, and typically on older panels these
		 * values correspond to the native resolution of the panel.
		 */
		limits->min_lane_count = limits->max_lane_count;
		limits->min_rate = limits->max_rate;
	}

	intel_dp_test_compute_config(intel_dp, crtc_state, limits);

	return intel_dp_compute_config_link_bpp_limits(connector,
						       crtc_state,
						       dsc,
						       limits);
}

/* Data rate (kBps) required by the configured mode at the configured bpp. */
int intel_dp_config_required_rate(const struct intel_crtc_state *crtc_state)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int link_bpp_x16 = crtc_state->dsc.compression_enable ?
		crtc_state->dsc.compressed_bpp_x16 :
		fxp_q4_from_int(crtc_state->pipe_bpp);

	return intel_dp_link_required(crtc_state->port_clock, crtc_state->lane_count,
				      adjusted_mode->crtc_clock, adjusted_mode->hdisplay,
				      link_bpp_x16, 0);
}

bool intel_dp_joiner_needs_dsc(struct intel_display *display,
			       int num_joined_pipes)
{
	/*
	 * Pipe joiner needs compression up to display 12 due to bandwidth
	 * limitation. DG2 onwards pipe joiner can be enabled without
	 * compression.
	 * Ultrajoiner always needs compression.
	 */
	return (!HAS_UNCOMPRESSED_JOINER(display) && num_joined_pipes == 2) ||
	       num_joined_pipes == 4;
}

/*
 * Top-level link configuration: determine the joiner setup, try an
 * uncompressed link config first, and fall back to DSC if that fails or DSC
 * is required (joiner/forced). Returns 0 on success, negative error code on
 * failure.
 */
static int
intel_dp_compute_link_config(struct intel_encoder *encoder,
			     struct intel_crtc_state *pipe_config,
			     struct drm_connector_state *conn_state,
			     bool respect_downstream_limits)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct link_config_limits limits;
	bool dsc_needed, joiner_needs_dsc;
	int num_joined_pipes;
	int ret = 0;

	if (pipe_config->fec_enable &&
	    !intel_dp_supports_fec(intel_dp, connector, pipe_config))
		return -EINVAL;

	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     adjusted_mode->crtc_hdisplay,
						     adjusted_mode->crtc_clock);
	if (num_joined_pipes > 1)
		pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);

	joiner_needs_dsc = intel_dp_joiner_needs_dsc(display, num_joined_pipes);

	/* DSC is also needed when the non-DSC limits can't be met at all */
	dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
		!intel_dp_compute_config_limits(intel_dp, conn_state, pipe_config,
						respect_downstream_limits,
						false,
						&limits);

	if (!dsc_needed) {
		/*
		 * Optimize for slow and wide for everything, because there are some
		 * eDP 1.3 and 1.4 panels don't work well with fast and narrow.
		 */
		ret = intel_dp_compute_link_config_wide(intel_dp, pipe_config,
							conn_state, &limits);
		if (!ret && intel_dp_is_uhbr(pipe_config))
			ret = intel_dp_mtp_tu_compute_config(intel_dp,
							     pipe_config,
							     conn_state,
							     fxp_q4_from_int(pipe_config->pipe_bpp),
							     fxp_q4_from_int(pipe_config->pipe_bpp),
							     0, false);
		if (ret)
			dsc_needed = true;
	}

	if (dsc_needed && !intel_dp_supports_dsc(intel_dp, connector, pipe_config)) {
		drm_dbg_kms(display->drm, "DSC required but not available\n");
		return -EINVAL;
	}

	if (dsc_needed) {
		drm_dbg_kms(display->drm,
			    "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
			    str_yes_no(intel_dp->force_dsc_en));

		/* Recompute the limits with the DSC-specific bpp ranges */
		if (!intel_dp_compute_config_limits(intel_dp, conn_state, pipe_config,
						    respect_downstream_limits,
						    true,
						    &limits))
			return -EINVAL;

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits, 64);
		if (ret < 0)
			return ret;
	}

	drm_dbg_kms(display->drm,
		    "DP lane count %d clock %d bpp input %d compressed " FXP_Q4_FMT " HDR %s link rate required %d available %d\n",
		    pipe_config->lane_count, pipe_config->port_clock,
		    pipe_config->pipe_bpp,
		    FXP_Q4_ARGS(pipe_config->dsc.compressed_bpp_x16),
		    str_yes_no(intel_dp_in_hdr_mode(conn_state)),
		    intel_dp_config_required_rate(pipe_config),
		    intel_dp_max_link_data_rate(intel_dp,
						pipe_config->port_clock,
						pipe_config->lane_count));

	return 0;
}

bool intel_dp_limited_color_range(const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	const struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/*
	 * Our YCbCr output is
	 * always limited range.
	 * crtc_state->limited_color_range only applies to RGB,
	 * and it must never be set for YCbCr or we risk setting
	 * some conflicting bits in TRANSCONF which will mess up
	 * the colors on the monitor.
	 */
	if (crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB)
		return false;

	if (intel_conn_state->broadcast_rgb == INTEL_BROADCAST_RGB_AUTO) {
		/*
		 * See:
		 * CEA-861-E - 5.1 Default Encoding Parameters
		 * VESA DisplayPort Ver.1.2a - 5.1.1.1 Video Colorimetry
		 */
		return crtc_state->pipe_bpp != 18 &&
			drm_default_rgb_quant_range(adjusted_mode) ==
			HDMI_QUANTIZATION_RANGE_LIMITED;
	} else {
		return intel_conn_state->broadcast_rgb ==
			INTEL_BROADCAST_RGB_LIMITED;
	}
}

/* Whether the platform supports audio on the given DP port. */
static bool intel_dp_port_has_audio(struct intel_display *display, enum port port)
{
	if (display->platform.g4x)
		return false;
	if (DISPLAY_VER(display) < 12 && port == PORT_A)
		return false;

	return true;
}

/*
 * Fill in the colorimetry fields of the VSC SDP (revision, pixel format,
 * colorimetry, bpc, dynamic range) from the CRTC and connector state.
 */
static void intel_dp_compute_vsc_colorimetry(const struct intel_crtc_state *crtc_state,
					     const struct drm_connector_state *conn_state,
					     struct drm_dp_vsc_sdp *vsc)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (crtc_state->has_panel_replay) {
		/*
		 * Prepare VSC Header for SU as per DP 2.0 spec, Table 2-223
		 * VSC SDP supporting 3D stereo, Panel Replay, and Pixel
		 * Encoding/Colorimetry Format indication.
		 */
		vsc->revision = 0x7;
	} else {
		/*
		 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118
		 * VSC SDP supporting 3D stereo, PSR2, and Pixel Encoding/
		 * Colorimetry Format indication.
		 */
		vsc->revision = 0x5;
	}

	vsc->length = 0x13;

	/* DP 1.4a spec, Table 2-120 */
	switch (crtc_state->output_format) {
	case INTEL_OUTPUT_FORMAT_YCBCR444:
		vsc->pixelformat = DP_PIXELFORMAT_YUV444;
		break;
	case INTEL_OUTPUT_FORMAT_YCBCR420:
		vsc->pixelformat = DP_PIXELFORMAT_YUV420;
		break;
	case INTEL_OUTPUT_FORMAT_RGB:
	default:
		vsc->pixelformat = DP_PIXELFORMAT_RGB;
	}

	switch (conn_state->colorspace) {
	case DRM_MODE_COLORIMETRY_BT709_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_XVYCC_709:
		vsc->colorimetry = DP_COLORIMETRY_XVYCC_709;
		break;
	case DRM_MODE_COLORIMETRY_SYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_SYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_OPYCC_601:
		vsc->colorimetry = DP_COLORIMETRY_OPYCC_601;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_CYCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_CYCC;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_RGB:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_RGB;
		break;
	case DRM_MODE_COLORIMETRY_BT2020_YCC:
		vsc->colorimetry = DP_COLORIMETRY_BT2020_YCC;
		break;
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_D65:
	case DRM_MODE_COLORIMETRY_DCI_P3_RGB_THEATER:
		vsc->colorimetry = DP_COLORIMETRY_DCI_P3_RGB;
		break;
	default:
		/*
		 * RGB->YCBCR color conversion uses the BT.709
		 * color space.
		 */
		if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420)
			vsc->colorimetry = DP_COLORIMETRY_BT709_YCC;
		else
			vsc->colorimetry = DP_COLORIMETRY_DEFAULT;
		break;
	}

	vsc->bpc = crtc_state->pipe_bpp / 3;

	/* only RGB pixelformat supports 6 bpc */
	drm_WARN_ON(display->drm,
		    vsc->bpc == 6 && vsc->pixelformat != DP_PIXELFORMAT_RGB);

	/* all YCbCr are always limited range */
	vsc->dynamic_range = DP_DYNAMIC_RANGE_CTA;
	vsc->content_type = DP_CONTENT_TYPE_NOT_DEFINED;
}

/*
 * Compute the Adaptive Sync SDP for VRR-enabled CRTCs on sinks supporting
 * the AS SDP. No-op otherwise.
 */
static void intel_dp_compute_as_sdp(struct intel_dp *intel_dp,
				    struct intel_crtc_state *crtc_state)
{
	struct drm_dp_as_sdp *as_sdp = &crtc_state->infoframes.as_sdp;
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	if (!crtc_state->vrr.enable || !intel_dp->as_sdp_supported)
		return;

	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_ADAPTIVE_SYNC);

	as_sdp->sdp_type = DP_SDP_ADAPTIVE_SYNC;
	as_sdp->length = 0x9;
	as_sdp->duration_incr_ms = 0;
	as_sdp->vtotal = intel_vrr_vmin_vtotal(crtc_state);

	if (crtc_state->cmrr.enable) {
		/* Fixed refresh rate (CMRR): advertise the target rate */
		as_sdp->mode = DP_AS_SDP_FAVT_TRR_REACHED;
		as_sdp->target_rr = drm_mode_vrefresh(adjusted_mode);
		as_sdp->target_rr_divider = true;
	} else {
		as_sdp->mode = DP_AS_SDP_AVT_DYNAMIC_VTOTAL;
		as_sdp->target_rr = 0;
	}
}

/*
 * Compute the VSC SDP when colorimetry indication or PSR/Panel Replay
 * requires one; the revision/length depend on which features are active.
 */
static void intel_dp_compute_vsc_sdp(struct intel_dp *intel_dp,
				     struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	struct drm_dp_vsc_sdp *vsc;

	if ((!intel_dp->colorimetry_support ||
	     !intel_dp_needs_vsc_sdp(crtc_state, conn_state)) &&
	    !crtc_state->has_psr)
		return;

	vsc = &crtc_state->infoframes.vsc;

	crtc_state->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
	vsc->sdp_type = DP_SDP_VSC;

/* Needs colorimetry */ 3033 if (intel_dp_needs_vsc_sdp(crtc_state, conn_state)) { 3034 intel_dp_compute_vsc_colorimetry(crtc_state, conn_state, 3035 vsc); 3036 } else if (crtc_state->has_panel_replay) { 3037 /* 3038 * [Panel Replay without colorimetry info] 3039 * Prepare VSC Header for SU as per DP 2.0 spec, Table 2-223 3040 * VSC SDP supporting 3D stereo + Panel Replay. 3041 */ 3042 vsc->revision = 0x6; 3043 vsc->length = 0x10; 3044 } else if (crtc_state->has_sel_update) { 3045 /* 3046 * [PSR2 without colorimetry] 3047 * Prepare VSC Header for SU as per eDP 1.4 spec, Table 6-11 3048 * 3D stereo + PSR/PSR2 + Y-coordinate. 3049 */ 3050 vsc->revision = 0x4; 3051 vsc->length = 0xe; 3052 } else { 3053 /* 3054 * [PSR1] 3055 * Prepare VSC Header for SU as per DP 1.4 spec, Table 2-118 3056 * VSC SDP supporting 3D stereo + PSR (applies to eDP v1.3 or 3057 * higher). 3058 */ 3059 vsc->revision = 0x2; 3060 vsc->length = 0x8; 3061 } 3062 } 3063 3064 bool 3065 intel_dp_in_hdr_mode(const struct drm_connector_state *conn_state) 3066 { 3067 struct hdr_output_metadata *hdr_metadata; 3068 3069 if (!conn_state->hdr_output_metadata) 3070 return false; 3071 3072 hdr_metadata = conn_state->hdr_output_metadata->data; 3073 3074 return hdr_metadata->hdmi_metadata_type1.eotf == HDMI_EOTF_SMPTE_ST2084; 3075 } 3076 3077 static void 3078 intel_dp_compute_hdr_metadata_infoframe_sdp(struct intel_dp *intel_dp, 3079 struct intel_crtc_state *crtc_state, 3080 const struct drm_connector_state *conn_state) 3081 { 3082 struct intel_display *display = to_intel_display(intel_dp); 3083 int ret; 3084 struct hdmi_drm_infoframe *drm_infoframe = &crtc_state->infoframes.drm.drm; 3085 3086 if (!conn_state->hdr_output_metadata) 3087 return; 3088 3089 ret = drm_hdmi_infoframe_set_hdr_metadata(drm_infoframe, conn_state); 3090 3091 if (ret) { 3092 drm_dbg_kms(display->drm, 3093 "couldn't set HDR metadata in infoframe\n"); 3094 return; 3095 } 3096 3097 crtc_state->infoframes.enable |= 3098 
		intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA);
}

/*
 * Check whether seamless DRRS (panel downclocking) may be enabled for this
 * connector/crtc state combination.
 */
static bool can_enable_drrs(struct intel_connector *connector,
			    const struct intel_crtc_state *pipe_config,
			    const struct drm_display_mode *downclock_mode)
{
	struct intel_display *display = to_intel_display(connector);

	/* VRR and DRRS are mutually exclusive */
	if (pipe_config->vrr.enable)
		return false;

	/*
	 * DRRS and PSR can't be enable together, so giving preference to PSR
	 * as it allows more power-savings by complete shutting down display,
	 * so to guarantee this, intel_drrs_compute_config() must be called
	 * after intel_psr_compute_config().
	 */
	if (pipe_config->has_psr)
		return false;

	/* FIXME missing FDI M2/N2 etc. */
	if (pipe_config->has_pch_encoder)
		return false;

	if (!intel_cpu_transcoder_has_drrs(display, pipe_config->cpu_transcoder))
		return false;

	/* A panel-provided downclock mode with seamless DRRS support is required */
	return downclock_mode &&
		intel_panel_drrs_type(connector) == DRRS_TYPE_SEAMLESS;
}

/*
 * Compute the DRRS state: decide whether DRRS can be used for this crtc
 * state and, if so, fill out the downclocked link M2/N2 values.
 */
static void
intel_dp_drrs_compute_config(struct intel_connector *connector,
			     struct intel_crtc_state *pipe_config,
			     int link_bpp_x16)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_mode *downclock_mode =
		intel_panel_downclock_mode(connector, &pipe_config->hw.adjusted_mode);
	int pixel_clock;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that when updating M/N live.
	 */
	if (has_seamless_m_n(connector) && !pipe_config->joiner_pipes)
		pipe_config->update_m_n = true;

	if (!can_enable_drrs(connector, pipe_config, downclock_mode)) {
		/* Clear any stale M2/N2 values if the transcoder has those registers */
		if (intel_cpu_transcoder_has_m2_n2(display, pipe_config->cpu_transcoder))
			intel_zero_m_n(&pipe_config->dp_m2_n2);
		return;
	}

	if (display->platform.ironlake || display->platform.sandybridge ||
	    display->platform.ivybridge)
		pipe_config->msa_timing_delay = connector->panel.vbt.edp.drrs_msa_timing_delay;

	pipe_config->has_drrs = true;

	pixel_clock = downclock_mode->clock;
	if (pipe_config->splitter.enable)
		pixel_clock /= pipe_config->splitter.link_count;

	intel_link_compute_m_n(link_bpp_x16, pipe_config->lane_count, pixel_clock,
			       pipe_config->port_clock,
			       intel_dp_bw_fec_overhead(pipe_config->fec_enable),
			       &pipe_config->dp_m2_n2);

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m2_n2.data_m *= pipe_config->splitter.link_count;
}

/* Determine whether audio should be enabled on this DP output */
static bool intel_dp_has_audio(struct intel_encoder *encoder,
			       const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	const struct intel_digital_connector_state *intel_conn_state =
		to_intel_digital_connector_state(conn_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);

	if (!intel_dp_port_has_audio(display, encoder->port))
		return false;

	/* "auto" follows the sink's audio capability, otherwise obey the override */
	if (intel_conn_state->force_audio == HDMI_AUDIO_AUTO)
		return connector->base.display_info.has_audio;
	else
		return intel_conn_state->force_audio == HDMI_AUDIO_ON;
}

/*
 * Pick the output format (RGB vs. YCbCr 4:2:0) and compute the link
 * configuration for it, falling back to 4:2:0 if the first attempt at the
 * link config fails and the mode also supports 4:2:0.
 */
static int
intel_dp_compute_output_format(struct intel_encoder *encoder,
			       struct intel_crtc_state *crtc_state,
			       struct drm_connector_state *conn_state,
			       bool respect_downstream_limits)
{
	struct intel_display
		*display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_display_info *info = &connector->base.display_info;
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	bool ycbcr_420_only;
	int ret;

	ycbcr_420_only = drm_mode_is_420_only(info, adjusted_mode);

	if (ycbcr_420_only && !connector->base.ycbcr_420_allowed) {
		drm_dbg_kms(display->drm,
			    "YCbCr 4:2:0 mode but YCbCr 4:2:0 output not possible. Falling back to RGB.\n");
		crtc_state->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	} else {
		crtc_state->sink_format = intel_dp_sink_format(connector, adjusted_mode);
	}

	crtc_state->output_format = intel_dp_output_format(connector, crtc_state->sink_format);

	ret = intel_dp_compute_link_config(encoder, crtc_state, conn_state,
					   respect_downstream_limits);
	if (ret) {
		/* Retry with YCbCr 4:2:0 if the mode supports it as an alternative */
		if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR420 ||
		    !connector->base.ycbcr_420_allowed ||
		    !drm_mode_is_420_also(info, adjusted_mode))
			return ret;

		crtc_state->sink_format = INTEL_OUTPUT_FORMAT_YCBCR420;
		crtc_state->output_format = intel_dp_output_format(connector,
								   crtc_state->sink_format);
		ret = intel_dp_compute_link_config(encoder, crtc_state, conn_state,
						   respect_downstream_limits);
	}

	return ret;
}

/* Compute the audio and SDP-split state for this DP output */
void
intel_dp_audio_compute_config(struct intel_encoder *encoder,
			      struct intel_crtc_state *pipe_config,
			      struct drm_connector_state *conn_state)
{
	pipe_config->has_audio =
		intel_dp_has_audio(encoder, conn_state) &&
		intel_audio_compute_config(encoder, pipe_config, conn_state);

	/* SDP split is used for audio on UHBR (128b/132b) links */
	pipe_config->sdp_split_enable = pipe_config->has_audio &&
					intel_dp_is_uhbr(pipe_config);
}

/*
 * Queue the modeset-retry work for the connector(s) driven by this encoder
 * after a link failure; on MST every active connector on the link is queued.
 * Only queued once per failure (guarded by needs_modeset_retry).
 */
void
intel_dp_queue_modeset_retry_for_link(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_connector *connector;
	struct intel_digital_connector_state *conn_state;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int i;

	if (intel_dp->needs_modeset_retry)
		return;

	intel_dp->needs_modeset_retry = true;

	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_connector_queue_modeset_retry_work(intel_dp->attached_connector);

		return;
	}

	/* On MST, queue the retry for every enabled connector on this link */
	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		if (!conn_state->base.crtc)
			continue;

		if (connector->mst.dp == intel_dp)
			intel_connector_queue_modeset_retry_work(connector);
	}
}

/*
 * Compute the minimum hblank (in link layer symbol cycles) for 8b/10b MST
 * and 128b/132b links and store it in the crtc state.
 */
int intel_dp_compute_min_hblank(struct intel_crtc_state *crtc_state,
				const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	/* 128b/132b uses 32-bit link symbols, 8b/10b uses 8-bit symbols */
	int symbol_size = intel_dp_is_uhbr(crtc_state) ? 32 : 8;
	/*
	 * min symbol cycles is 3(BS,VBID, BE) for 128b/132b and
	 * 5(BS, VBID, MVID, MAUD, BE) for 8b/10b
	 */
	int min_sym_cycles = intel_dp_is_uhbr(crtc_state) ?
			3 : 5;
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);
	int min_hblank;
	int max_lane_count = 4;
	int hactive_sym_cycles, htotal_sym_cycles;
	int dsc_slices = 0;
	int link_bpp_x16;

	/* MIN_HBLANK is only programmed on display version 30+ */
	if (DISPLAY_VER(display) < 30)
		return 0;

	/* MIN_HBLANK should be set only for 8b/10b MST or for 128b/132b SST/MST */
	if (!is_mst && !intel_dp_is_uhbr(crtc_state))
		return 0;

	if (crtc_state->dsc.compression_enable) {
		dsc_slices = intel_dp_dsc_get_slice_count(connector,
							  adjusted_mode->crtc_clock,
							  adjusted_mode->crtc_hdisplay,
							  num_joined_pipes);
		if (!dsc_slices) {
			drm_dbg(display->drm, "failed to calculate dsc slice count\n");
			return -EINVAL;
		}
	}

	/* Link bpp: compressed bpp when DSC is enabled, otherwise from the output format */
	if (crtc_state->dsc.compression_enable)
		link_bpp_x16 = crtc_state->dsc.compressed_bpp_x16;
	else
		link_bpp_x16 = intel_dp_output_format_link_bpp_x16(crtc_state->output_format,
								   crtc_state->pipe_bpp);

	/* Calculate min Hblank Link Layer Symbol Cycle Count for 8b/10b MST & 128b/132b */
	hactive_sym_cycles = drm_dp_link_symbol_cycles(max_lane_count,
						       adjusted_mode->hdisplay,
						       dsc_slices,
						       link_bpp_x16,
						       symbol_size, is_mst);
	htotal_sym_cycles = adjusted_mode->htotal * hactive_sym_cycles /
			    adjusted_mode->hdisplay;

	min_hblank = htotal_sym_cycles - hactive_sym_cycles;
	/* minimum Hblank calculation: https://groups.vesa.org/wg/DP/document/20494 */
	min_hblank = max(min_hblank, min_sym_cycles);

	/*
	 * adjust the BlankingStart/BlankingEnd framing control from
	 * the calculated value
	 */
	min_hblank = min_hblank - 2;

	/*
	 * min_hblank formula is undergoing a change, to avoid underrun use the
	 * recomended value in spec to compare with the calculated one and use the
	 * minimum value
	 */
	if (intel_dp_is_uhbr(crtc_state)) {
		/*
		 * Note: Bspec requires a min_hblank of 2 for YCBCR420
		 * with compressed bpp 6, but the minimum compressed bpp
		 * supported by the driver is 8.
		 */
		drm_WARN_ON(display->drm,
			    (crtc_state->dsc.compression_enable &&
			     crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 &&
			     crtc_state->dsc.compressed_bpp_x16 < fxp_q4_from_int(8)));
		min_hblank = min(3, min_hblank);
	} else {
		min_hblank = min(10, min_hblank);
	}

	crtc_state->min_hblank = min_hblank;

	return 0;
}

/*
 * Main DP config computation: validate the mode, pick output format and
 * link config, then compute all dependent state (pfit, MSO, M/N, VRR,
 * PSR, DRRS, SDPs, tunnel bandwidth).
 */
int
intel_dp_compute_config(struct intel_encoder *encoder,
			struct intel_crtc_state *pipe_config,
			struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct drm_display_mode *adjusted_mode = &pipe_config->hw.adjusted_mode;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	const struct drm_display_mode *fixed_mode;
	struct intel_connector *connector = intel_dp->attached_connector;
	int ret = 0, link_bpp_x16;

	fixed_mode = intel_panel_fixed_mode(connector, adjusted_mode);
	if (intel_dp_is_edp(intel_dp) && fixed_mode) {
		ret = intel_panel_compute_config(connector, adjusted_mode);
		if (ret)
			return ret;
	}

	/* Reject mode flags / geometry this output cannot support */
	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	if (!connector->base.interlace_allowed &&
	    adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLCLK)
		return -EINVAL;

	if (intel_dp_hdisplay_bad(display, adjusted_mode->crtc_hdisplay))
		return -EINVAL;

	/*
	 * Try to respect downstream TMDS clock limits first, if
	 * that fails assume the user might know something we don't.
	 */
	ret = intel_dp_compute_output_format(encoder, pipe_config, conn_state, true);
	if (ret)
		ret = intel_dp_compute_output_format(encoder, pipe_config, conn_state, false);
	if (ret)
		return ret;

	if ((intel_dp_is_edp(intel_dp) && fixed_mode) ||
	    pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) {
		ret = intel_pfit_compute_config(pipe_config, conn_state);
		if (ret)
			return ret;
	}

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	if (intel_dp_is_uhbr(pipe_config)) {
		/* 128b/132b SST also needs this */
		pipe_config->mst_master_transcoder = pipe_config->cpu_transcoder;
	} else {
		pipe_config->enhanced_framing =
			drm_dp_enhanced_frame_cap(intel_dp->dpcd);
	}

	/* Link bpp: compressed bpp when DSC is enabled, otherwise from the output format */
	if (pipe_config->dsc.compression_enable)
		link_bpp_x16 = pipe_config->dsc.compressed_bpp_x16;
	else
		link_bpp_x16 = intel_dp_output_format_link_bpp_x16(pipe_config->output_format,
								   pipe_config->pipe_bpp);

	if (intel_dp->mso_link_count) {
		int n = intel_dp->mso_link_count;
		int overlap = intel_dp->mso_pixel_overlap;

		pipe_config->splitter.enable = true;
		pipe_config->splitter.link_count = n;
		pipe_config->splitter.pixel_overlap = overlap;

		drm_dbg_kms(display->drm,
			    "MSO link count %d, pixel overlap %d\n",
			    n, overlap);

		/* Split the horizontal timings across the MSO links */
		adjusted_mode->crtc_hdisplay = adjusted_mode->crtc_hdisplay / n + overlap;
		adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hblank_start / n + overlap;
		adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_hblank_end / n + overlap;
		adjusted_mode->crtc_hsync_start = adjusted_mode->crtc_hsync_start / n + overlap;
		adjusted_mode->crtc_hsync_end = adjusted_mode->crtc_hsync_end / n + overlap;
		adjusted_mode->crtc_htotal = adjusted_mode->crtc_htotal / n + overlap;
		adjusted_mode->crtc_clock /= n;
	}

	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);

	if (!intel_dp_is_uhbr(pipe_config)) {
		intel_link_compute_m_n(link_bpp_x16,
				       pipe_config->lane_count,
				       adjusted_mode->crtc_clock,
				       pipe_config->port_clock,
				       intel_dp_bw_fec_overhead(pipe_config->fec_enable),
				       &pipe_config->dp_m_n);
	}

	ret = intel_dp_compute_min_hblank(pipe_config, conn_state);
	if (ret)
		return ret;

	/* FIXME: abstract this better */
	if (pipe_config->splitter.enable)
		pipe_config->dp_m_n.data_m *= pipe_config->splitter.link_count;

	intel_vrr_compute_config(pipe_config, conn_state);
	intel_dp_compute_as_sdp(intel_dp, pipe_config);
	intel_psr_compute_config(intel_dp, pipe_config, conn_state);
	intel_alpm_lobf_compute_config(intel_dp, pipe_config, conn_state);
	intel_dp_drrs_compute_config(connector, pipe_config, link_bpp_x16);
	intel_dp_compute_vsc_sdp(intel_dp, pipe_config, conn_state);
	intel_dp_compute_hdr_metadata_infoframe_sdp(intel_dp, pipe_config, conn_state);

	return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
							pipe_config);
}

/* Record the parameters for a new link configuration prior to link training */
void intel_dp_set_link_params(struct intel_dp *intel_dp,
			      int link_rate, int lane_count)
{
	memset(intel_dp->train_set, 0, sizeof(intel_dp->train_set));
	intel_dp->link.active = false;
	intel_dp->needs_modeset_retry = false;
	intel_dp->link_rate = link_rate;
	intel_dp->lane_count = lane_count;
}

/* Reset the link limits/state back to the maximum common source+sink values */
void intel_dp_reset_link_params(struct intel_dp *intel_dp)
{
	intel_dp->link.max_lane_count = intel_dp_max_common_lane_count(intel_dp);
	intel_dp->link.max_rate = intel_dp_max_common_rate(intel_dp);
	intel_dp->link.mst_probed_lane_count = 0;
	intel_dp->link.mst_probed_rate = 0;
	intel_dp->link.retrain_disabled = false;
	intel_dp->link.seq_train_failures = 0;
}

/* Enable backlight PWM and backlight PP control.
 */
void intel_edp_backlight_on(const struct intel_crtc_state *crtc_state,
			    const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(conn_state->best_encoder));

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(display->drm, "\n");

	intel_backlight_enable(crtc_state, conn_state);
	intel_pps_backlight_on(intel_dp);
}

/* Disable backlight PP control and backlight PWM. */
void intel_edp_backlight_off(const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(to_intel_encoder(old_conn_state->best_encoder));
	struct intel_display *display = to_intel_display(intel_dp);

	if (!intel_dp_is_edp(intel_dp))
		return;

	drm_dbg_kms(display->drm, "\n");

	/* Reverse order wrt. intel_edp_backlight_on() */
	intel_pps_backlight_off(intel_dp);
	intel_backlight_disable(old_conn_state);
}

/* Does a downstream device require the sink to stay in D0 for HPD signalling? */
static bool downstream_hpd_needs_d0(struct intel_dp *intel_dp)
{
	/*
	 * DPCD 1.2+ should support BRANCH_DEVICE_CTRL, and thus
	 * be capable of signalling downstream hpd with a long pulse.
	 * Whether or not that means D3 is safe to use is not clear,
	 * but let's assume so until proven otherwise.
	 *
	 * FIXME should really check all downstream ports...
	 */
	return intel_dp->dpcd[DP_DPCD_REV] == 0x11 &&
		drm_dp_is_branch(intel_dp->dpcd) &&
		intel_dp->downstream_ports[0] & DP_DS_PORT_HPD;
}

/*
 * Read-modify-write a single flag in the DP_DSC_ENABLE DPCD register.
 * Returns a negative error code on AUX failure.
 */
static int
write_dsc_decompression_flag(struct drm_dp_aux *aux, u8 flag, bool set)
{
	int err;
	u8 val;

	err = drm_dp_dpcd_readb(aux, DP_DSC_ENABLE, &val);
	if (err < 0)
		return err;

	if (set)
		val |= flag;
	else
		val &= ~flag;

	return drm_dp_dpcd_writeb(aux, DP_DSC_ENABLE, val);
}

/* Enable/disable DSC decompression via the connector's decompression AUX */
static void
intel_dp_sink_set_dsc_decompression(struct intel_connector *connector,
				    bool enable)
{
	struct intel_display *display = to_intel_display(connector);

	if (write_dsc_decompression_flag(connector->dp.dsc_decompression_aux,
					 DP_DECOMPRESSION_EN, enable) < 0)
		drm_dbg_kms(display->drm,
			    "Failed to %s sink decompression state\n",
			    str_enable_disable(enable));
}

/* Enable/disable DSC passthrough in the last branch device, if it has one */
static void
intel_dp_sink_set_dsc_passthrough(const struct intel_connector *connector,
				  bool enable)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_dp_aux *aux = connector->mst.port ?
				 connector->mst.port->passthrough_aux : NULL;

	if (!aux)
		return;

	if (write_dsc_decompression_flag(aux,
					 DP_DSC_PASSTHROUGH_EN, enable) < 0)
		drm_dbg_kms(display->drm,
			    "Failed to %s sink compression passthrough state\n",
			    str_enable_disable(enable));
}

/*
 * Count the connectors in @state that share @connector's DSC decompression
 * AUX and have decompression enabled. Used to refcount enabling/disabling
 * decompression on an AUX device shared by multiple MST streams.
 */
static int intel_dp_dsc_aux_ref_count(struct intel_atomic_state *state,
				      const struct intel_connector *connector,
				      bool for_get_ref)
{
	struct intel_display *display = to_intel_display(state);
	struct drm_connector *_connector_iter;
	struct drm_connector_state *old_conn_state;
	struct drm_connector_state *new_conn_state;
	int ref_count = 0;
	int i;

	/*
	 * On SST the decompression AUX device won't be shared, each connector
	 * uses for this its own AUX targeting the sink device.
	 */
	if (!connector->mst.dp)
		return connector->dp.dsc_decompression_enabled ? 1 : 0;

	for_each_oldnew_connector_in_state(&state->base, _connector_iter,
					   old_conn_state, new_conn_state, i) {
		const struct intel_connector *
			connector_iter = to_intel_connector(_connector_iter);

		/* Only consider connectors on the same MST link */
		if (connector_iter->mst.dp != connector->mst.dp)
			continue;

		if (!connector_iter->dp.dsc_decompression_enabled)
			continue;

		drm_WARN_ON(display->drm,
			    (for_get_ref && !new_conn_state->crtc) ||
			    (!for_get_ref && !old_conn_state->crtc));

		if (connector_iter->dp.dsc_decompression_aux ==
		    connector->dp.dsc_decompression_aux)
			ref_count++;
	}

	return ref_count;
}

/*
 * Take a reference on the decompression AUX; returns true if this was the
 * first reference, i.e. the caller must actually enable decompression.
 */
static bool intel_dp_dsc_aux_get_ref(struct intel_atomic_state *state,
				     struct intel_connector *connector)
{
	bool ret = intel_dp_dsc_aux_ref_count(state, connector, true) == 0;

	connector->dp.dsc_decompression_enabled = true;

	return ret;
}

/*
 * Drop a reference on the decompression AUX; returns true if this was the
 * last reference, i.e. the caller must actually disable decompression.
 */
static bool intel_dp_dsc_aux_put_ref(struct intel_atomic_state *state,
				     struct intel_connector *connector)
{
	connector->dp.dsc_decompression_enabled = false;

	return intel_dp_dsc_aux_ref_count(state, connector, false) == 0;
}

/**
 * intel_dp_sink_enable_decompression - Enable DSC decompression in sink/last branch device
 * @state: atomic state
 * @connector: connector to enable the decompression for
 * @new_crtc_state: new state for the CRTC driving @connector
 *
 * Enable the DSC decompression if required in the %DP_DSC_ENABLE DPCD
 * register of the appropriate sink/branch device. On SST this is always the
 * sink device, whereas on MST based on each device's DSC capabilities it's
 * either the last branch device (enabling decompression in it) or both the
 * last branch device (enabling passthrough in it) and the sink device
 * (enabling decompression in it).
 */
void intel_dp_sink_enable_decompression(struct intel_atomic_state *state,
					struct intel_connector *connector,
					const struct intel_crtc_state *new_crtc_state)
{
	struct intel_display *display = to_intel_display(state);

	if (!new_crtc_state->dsc.compression_enable)
		return;

	if (drm_WARN_ON(display->drm,
			!connector->dp.dsc_decompression_aux ||
			connector->dp.dsc_decompression_enabled))
		return;

	/* Only the first reference actually programs the sink */
	if (!intel_dp_dsc_aux_get_ref(state, connector))
		return;

	intel_dp_sink_set_dsc_passthrough(connector, true);
	intel_dp_sink_set_dsc_decompression(connector, true);
}

/**
 * intel_dp_sink_disable_decompression - Disable DSC decompression in sink/last branch device
 * @state: atomic state
 * @connector: connector to disable the decompression for
 * @old_crtc_state: old state for the CRTC driving @connector
 *
 * Disable the DSC decompression if required in the %DP_DSC_ENABLE DPCD
 * register of the appropriate sink/branch device, corresponding to the
 * sequence in
 * intel_dp_sink_enable_decompression().
 */
void intel_dp_sink_disable_decompression(struct intel_atomic_state *state,
					 struct intel_connector *connector,
					 const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(state);

	if (!old_crtc_state->dsc.compression_enable)
		return;

	if (drm_WARN_ON(display->drm,
			!connector->dp.dsc_decompression_aux ||
			!connector->dp.dsc_decompression_enabled))
		return;

	/* Only the last reference actually programs the sink */
	if (!intel_dp_dsc_aux_put_ref(state, connector))
		return;

	/* Reverse order wrt. intel_dp_sink_enable_decompression() */
	intel_dp_sink_set_dsc_decompression(connector, false);
	intel_dp_sink_set_dsc_passthrough(connector, false);
}

/* Write our source OUI to the sink, unless it is already valid/written */
static void
intel_dp_init_source_oui(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	u8 oui[] = { 0x00, 0xaa, 0x01 };
	u8 buf[3] = {};

	if (READ_ONCE(intel_dp->oui_valid))
		return;

	WRITE_ONCE(intel_dp->oui_valid, true);

	/*
	 * During driver init, we want to be careful and avoid changing the source OUI if it's
	 * already set to what we want, so as to avoid clearing any state by accident
	 */
	if (drm_dp_dpcd_read(&intel_dp->aux, DP_SOURCE_OUI, buf, sizeof(buf)) < 0)
		drm_dbg_kms(display->drm, "Failed to read source OUI\n");

	if (memcmp(oui, buf, sizeof(oui)) == 0) {
		/* Assume the OUI was written now. */
		intel_dp->last_oui_write = jiffies;
		return;
	}

	if (drm_dp_dpcd_write(&intel_dp->aux, DP_SOURCE_OUI, oui, sizeof(oui)) < 0) {
		drm_dbg_kms(display->drm, "Failed to write source OUI\n");
		/* Mark invalid so the write is retried on the next call */
		WRITE_ONCE(intel_dp->oui_valid, false);
	}

	intel_dp->last_oui_write = jiffies;
}

/* Force the source OUI to be rewritten by the next intel_dp_init_source_oui() */
void intel_dp_invalidate_source_oui(struct intel_dp *intel_dp)
{
	WRITE_ONCE(intel_dp->oui_valid, false);
}

/* Wait out the panel's post-OUI-write settle time (timeout from VBT) */
void intel_dp_wait_source_oui(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] Performing OUI wait (%u ms)\n",
		    connector->base.base.id, connector->base.name,
		    connector->panel.vbt.backlight.hdr_dpcd_refresh_timeout);

	wait_remaining_ms_from_jiffies(intel_dp->last_oui_write,
				       connector->panel.vbt.backlight.hdr_dpcd_refresh_timeout);
}

/* If the device supports it, try to set the power state appropriately */
void intel_dp_set_power(struct intel_dp *intel_dp, u8 mode)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int ret, i;

	/* Should have a valid DPCD by this point */
	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
		return;

	if (mode != DP_SET_POWER_D0) {
		/* Stay in D0 if a downstream port relies on it for HPD */
		if (downstream_hpd_needs_d0(intel_dp))
			return;

		ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
	} else {
		struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);

		intel_lspcon_resume(dig_port);

		/* Write the source OUI as early as possible */
		intel_dp_init_source_oui(intel_dp);

		/*
		 * When turning on, we need to retry for 1ms to give the sink
		 * time to wake up.
		 */
		for (i = 0; i < 3; i++) {
			ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, mode);
			if (ret == 1)
				break;
			msleep(1);
		}

		if (ret == 1 && intel_lspcon_active(dig_port))
			intel_lspcon_wait_pcon_mode(dig_port);
	}

	if (ret != 1)
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Set power to %s failed\n",
			    encoder->base.base.id, encoder->base.name,
			    mode == DP_SET_POWER_D0 ? "D0" : "D3");
}

/* Forward declaration; used by intel_dp_sync_state() below */
static bool
intel_dp_get_dpcd(struct intel_dp *intel_dp);

/**
 * intel_dp_sync_state - sync the encoder state during init/resume
 * @encoder: intel encoder to sync
 * @crtc_state: state for the CRTC connected to the encoder
 *
 * Sync any state stored in the encoder wrt. HW state during driver init
 * and system resume.
 */
void intel_dp_sync_state(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	bool dpcd_updated = false;

	/*
	 * Don't clobber DPCD if it's been already read out during output
	 * setup (eDP) or detect.
	 */
	if (crtc_state && intel_dp->dpcd[DP_DPCD_REV] == 0) {
		intel_dp_get_dpcd(intel_dp);
		dpcd_updated = true;
	}

	intel_dp_tunnel_resume(intel_dp, crtc_state, dpcd_updated);

	if (crtc_state) {
		/* Adopt the BIOS-programmed link parameters as the active link */
		intel_dp_reset_link_params(intel_dp);
		intel_dp_set_link_params(intel_dp, crtc_state->port_clock, crtc_state->lane_count);
		intel_dp->link.active = true;
	}
}

/*
 * Check whether the BIOS-programmed state allows a fastset; returns false
 * (and flags the needed uapi state changes) when a full modeset is required.
 */
bool intel_dp_initial_fastset_check(struct intel_encoder *encoder,
				   struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	bool fastset = true;

	/*
	 * If BIOS has set an unsupported or non-standard link rate for some
	 * reason force an encoder recompute and full modeset.
	 */
	if (intel_dp_rate_index(intel_dp->source_rates, intel_dp->num_source_rates,
				crtc_state->port_clock) < 0) {
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Forcing full modeset due to unsupported link rate\n",
			    encoder->base.base.id, encoder->base.name);
		crtc_state->uapi.connectors_changed = true;
		fastset = false;
	}

	/*
	 * FIXME hack to force full modeset when DSC is being used.
	 *
	 * As long as we do not have full state readout and config comparison
	 * of crtc_state->dsc, we have no way to ensure reliable fastset.
	 * Remove once we have readout for DSC.
	 */
	if (crtc_state->dsc.compression_enable) {
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Forcing full modeset due to DSC being enabled\n",
			    encoder->base.base.id, encoder->base.name);
		crtc_state->uapi.mode_changed = true;
		fastset = false;
	}

	/* Panel replay state requires a full modeset to be computed */
	if (CAN_PANEL_REPLAY(intel_dp)) {
		drm_dbg_kms(display->drm,
			    "[ENCODER:%d:%s] Forcing full modeset to compute panel replay state\n",
			    encoder->base.base.id, encoder->base.name);
		crtc_state->uapi.mode_changed = true;
		fastset = false;
	}

	return fastset;
}

/* Read and cache the PCON's DSC encoder capability DPCD registers */
static void intel_dp_get_pcon_dsc_cap(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/* Clear the cached register set to avoid using stale values */

	memset(intel_dp->pcon_dsc_dpcd, 0, sizeof(intel_dp->pcon_dsc_dpcd));

	/* PCON DSC caps only apply to branch devices */
	if (!drm_dp_is_branch(intel_dp->dpcd))
		return;

	if (drm_dp_dpcd_read(&intel_dp->aux, DP_PCON_DSC_ENCODER,
			     intel_dp->pcon_dsc_dpcd,
			     sizeof(intel_dp->pcon_dsc_dpcd)) < 0)
		drm_err(display->drm, "Failed to read DPCD register 0x%x\n",
			DP_PCON_DSC_ENCODER);

	drm_dbg_kms(display->drm, "PCON ENCODER DSC DPCD: %*ph\n",
		    (int)sizeof(intel_dp->pcon_dsc_dpcd), intel_dp->pcon_dsc_dpcd);
}

/* Return the highest FRL bandwidth (in Gbps) set in @frl_bw_mask, or 0 */
static int intel_dp_pcon_get_frl_mask(u8 frl_bw_mask)
{
	static const int bw_gbps[] = {9, 18, 24, 32, 40, 48};
	int i;

	for (i = ARRAY_SIZE(bw_gbps) - 1; i >= 0; i--) {
		if (frl_bw_mask & (1 << i))
			return bw_gbps[i];
	}
	return 0;
}

/* Convert an FRL bandwidth in Gbps to the DPCD bandwidth mask, or 0 if unknown */
static int intel_dp_pcon_set_frl_mask(int max_frl)
{
	switch (max_frl) {
	case 48:
		return DP_PCON_FRL_BW_MASK_48GBPS;
	case 40:
		return DP_PCON_FRL_BW_MASK_40GBPS;
	case 32:
		return DP_PCON_FRL_BW_MASK_32GBPS;
	case 24:
		return DP_PCON_FRL_BW_MASK_24GBPS;
	case 18:
		return DP_PCON_FRL_BW_MASK_18GBPS;
	case 9:
		return DP_PCON_FRL_BW_MASK_9GBPS;
	}

	return 0;
}

/* Maximum FRL rate (in Gbps) the HDMI sink supports, per its EDID */
static int intel_dp_hdmi_sink_max_frl(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_display_info *info = &connector->base.display_info;
	int max_frl_rate;
	int max_lanes, rate_per_lane;
	int max_dsc_lanes, dsc_rate_per_lane;

	max_lanes = info->hdmi.max_lanes;
	rate_per_lane = info->hdmi.max_frl_rate_per_lane;
	max_frl_rate = max_lanes * rate_per_lane;

	/* With DSC the sink may advertise a different (lower) FRL limit */
	if (info->hdmi.dsc_cap.v_1p2) {
		max_dsc_lanes = info->hdmi.dsc_cap.max_lanes;
		dsc_rate_per_lane = info->hdmi.dsc_cap.max_frl_rate_per_lane;
		if (max_dsc_lanes && dsc_rate_per_lane)
			max_frl_rate = min(max_frl_rate, max_dsc_lanes * dsc_rate_per_lane);
	}

	return max_frl_rate;
}

/* Check if the PCON's HDMI link is active in FRL mode at (at least) the wanted BW */
static bool
intel_dp_pcon_is_frl_trained(struct intel_dp *intel_dp,
			     u8 max_frl_bw_mask, u8 *frl_trained_mask)
{
	if (drm_dp_pcon_hdmi_link_active(&intel_dp->aux) &&
	    drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, frl_trained_mask) == DP_PCON_HDMI_MODE_FRL &&
	    *frl_trained_mask >= max_frl_bw_mask)
		return true;

	return false;
}

/*
 * Train the PCON's HDMI link in FRL mode at the highest bandwidth supported
 * by both the PCON and the sink, following the sequential-link training
 * sequence (prepare, configure, enable, wait for the link to come up).
 */
static int intel_dp_pcon_start_frl_training(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
#define TIMEOUT_FRL_READY_MS 500
#define TIMEOUT_HDMI_LINK_ACTIVE_MS 1000
	int max_frl_bw, max_pcon_frl_bw, max_edid_frl_bw, ret;
	u8 max_frl_bw_mask = 0, frl_trained_mask;
	bool is_active;

	max_pcon_frl_bw = intel_dp->dfp.pcon_max_frl_bw;
	drm_dbg(display->drm, "PCON max rate = %d Gbps\n", max_pcon_frl_bw);

	max_edid_frl_bw = intel_dp_hdmi_sink_max_frl(intel_dp);
	drm_dbg(display->drm, "Sink max rate from EDID = %d Gbps\n",
		max_edid_frl_bw);

	/* Train at the lower of the PCON and sink limits */
	max_frl_bw = min(max_edid_frl_bw, max_pcon_frl_bw);

	if (max_frl_bw <= 0)
		return
			-EINVAL;

	max_frl_bw_mask = intel_dp_pcon_set_frl_mask(max_frl_bw);
	drm_dbg(display->drm, "MAX_FRL_BW_MASK = %u\n", max_frl_bw_mask);

	/* Nothing to do if the HDMI link is already trained at this rate */
	if (intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask))
		goto frl_trained;

	ret = drm_dp_pcon_frl_prepare(&intel_dp->aux, false);
	if (ret < 0)
		return ret;
	/* Wait for PCON to be FRL Ready */
	ret = poll_timeout_us(is_active = drm_dp_pcon_is_frl_ready(&intel_dp->aux),
			      is_active,
			      1000, TIMEOUT_FRL_READY_MS * 1000, false);
	if (ret)
		return ret;

	ret = drm_dp_pcon_frl_configure_1(&intel_dp->aux, max_frl_bw,
					  DP_PCON_ENABLE_SEQUENTIAL_LINK);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_configure_2(&intel_dp->aux, max_frl_bw_mask,
					  DP_PCON_FRL_LINK_TRAIN_NORMAL);
	if (ret < 0)
		return ret;
	ret = drm_dp_pcon_frl_enable(&intel_dp->aux);
	if (ret < 0)
		return ret;
	/*
	 * Wait for FRL to be completed
	 * Check if the HDMI Link is up and active.
	 */
	ret = poll_timeout_us(is_active = intel_dp_pcon_is_frl_trained(intel_dp, max_frl_bw_mask, &frl_trained_mask),
			      is_active,
			      1000, TIMEOUT_HDMI_LINK_ACTIVE_MS * 1000, false);
	if (ret)
		return ret;

frl_trained:
	drm_dbg(display->drm, "FRL_TRAINED_MASK = %u\n", frl_trained_mask);
	intel_dp->frl.trained_rate_gbps = intel_dp_pcon_get_frl_mask(frl_trained_mask);
	intel_dp->frl.is_trained = true;
	drm_dbg(display->drm, "FRL trained with : %d Gbps\n",
		intel_dp->frl.trained_rate_gbps);

	return 0;
}

/* True if the branch device drives an HDMI 2.1 (FRL-capable) sink */
static bool intel_dp_is_hdmi_2_1_sink(struct intel_dp *intel_dp)
{
	if (drm_dp_is_branch(intel_dp->dpcd) &&
	    intel_dp_has_hdmi_sink(intel_dp) &&
	    intel_dp_hdmi_sink_max_frl(intel_dp) > 0)
		return true;

	return false;
}

/* Put the PCON into source control mode and enable the HDMI (TMDS) link */
static
int intel_dp_pcon_set_tmds_mode(struct intel_dp *intel_dp)
{
	int ret;
	u8 buf = 0;

	/* Set PCON source control mode */
	buf |= DP_PCON_ENABLE_SOURCE_CTL_MODE;

	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
	if (ret < 0)
		return ret;

	/* Set HDMI LINK ENABLE */
	buf |= DP_PCON_ENABLE_HDMI_LINK;
	ret = drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf);
	if (ret < 0)
		return ret;

	return 0;
}

/*
 * Kick off PCON FRL training when the PCON supports source control mode and
 * the sink is HDMI 2.1, unless FRL is already trained.
 */
void intel_dp_check_frl_training(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Always go for FRL training if:
	 * -PCON supports SRC_CTL_MODE (VESA DP2.0-HDMI2.1 PCON Spec Draft-1 Sec-7)
	 * -sink is HDMI2.1
	 */
	if (!(intel_dp->downstream_ports[2] & DP_PCON_SOURCE_CTL_MODE) ||
	    !intel_dp_is_hdmi_2_1_sink(intel_dp) ||
	    intel_dp->frl.is_trained)
		return;

	if (intel_dp_pcon_start_frl_training(intel_dp) < 0) {
		int ret, mode;

		drm_dbg(display->drm,
			"Couldn't set FRL mode, continuing
with TMDS mode\n"); 4110 ret = intel_dp_pcon_set_tmds_mode(intel_dp); 4111 mode = drm_dp_pcon_hdmi_link_mode(&intel_dp->aux, NULL); 4112 4113 if (ret < 0 || mode != DP_PCON_HDMI_MODE_TMDS) 4114 drm_dbg(display->drm, 4115 "Issue with PCON, cannot set TMDS mode\n"); 4116 } else { 4117 drm_dbg(display->drm, "FRL training Completed\n"); 4118 } 4119 } 4120 4121 static int 4122 intel_dp_pcon_dsc_enc_slice_height(const struct intel_crtc_state *crtc_state) 4123 { 4124 int vactive = crtc_state->hw.adjusted_mode.vdisplay; 4125 4126 return intel_hdmi_dsc_get_slice_height(vactive); 4127 } 4128 4129 static int 4130 intel_dp_pcon_dsc_enc_slices(struct intel_dp *intel_dp, 4131 const struct intel_crtc_state *crtc_state) 4132 { 4133 struct intel_connector *connector = intel_dp->attached_connector; 4134 const struct drm_display_info *info = &connector->base.display_info; 4135 int hdmi_throughput = info->hdmi.dsc_cap.clk_per_slice; 4136 int hdmi_max_slices = info->hdmi.dsc_cap.max_slices; 4137 int pcon_max_slices = drm_dp_pcon_dsc_max_slices(intel_dp->pcon_dsc_dpcd); 4138 int pcon_max_slice_width = drm_dp_pcon_dsc_max_slice_width(intel_dp->pcon_dsc_dpcd); 4139 4140 return intel_hdmi_dsc_get_num_slices(crtc_state, pcon_max_slices, 4141 pcon_max_slice_width, 4142 hdmi_max_slices, hdmi_throughput); 4143 } 4144 4145 static int 4146 intel_dp_pcon_dsc_enc_bpp(struct intel_dp *intel_dp, 4147 const struct intel_crtc_state *crtc_state, 4148 int num_slices, int slice_width) 4149 { 4150 struct intel_connector *connector = intel_dp->attached_connector; 4151 const struct drm_display_info *info = &connector->base.display_info; 4152 int output_format = crtc_state->output_format; 4153 bool hdmi_all_bpp = info->hdmi.dsc_cap.all_bpp; 4154 int pcon_fractional_bpp = drm_dp_pcon_dsc_bpp_incr(intel_dp->pcon_dsc_dpcd); 4155 int hdmi_max_chunk_bytes = 4156 info->hdmi.dsc_cap.total_chunk_kbytes * 1024; 4157 4158 return intel_hdmi_dsc_get_bpp(pcon_fractional_bpp, slice_width, 4159 num_slices, output_format, 
hdmi_all_bpp, 4160 hdmi_max_chunk_bytes); 4161 } 4162 4163 void 4164 intel_dp_pcon_dsc_configure(struct intel_dp *intel_dp, 4165 const struct intel_crtc_state *crtc_state) 4166 { 4167 struct intel_display *display = to_intel_display(intel_dp); 4168 struct intel_connector *connector = intel_dp->attached_connector; 4169 const struct drm_display_info *info; 4170 u8 pps_param[6]; 4171 int slice_height; 4172 int slice_width; 4173 int num_slices; 4174 int bits_per_pixel; 4175 int ret; 4176 bool hdmi_is_dsc_1_2; 4177 4178 if (!intel_dp_is_hdmi_2_1_sink(intel_dp)) 4179 return; 4180 4181 if (!connector) 4182 return; 4183 4184 info = &connector->base.display_info; 4185 4186 hdmi_is_dsc_1_2 = info->hdmi.dsc_cap.v_1p2; 4187 4188 if (!drm_dp_pcon_enc_is_dsc_1_2(intel_dp->pcon_dsc_dpcd) || 4189 !hdmi_is_dsc_1_2) 4190 return; 4191 4192 slice_height = intel_dp_pcon_dsc_enc_slice_height(crtc_state); 4193 if (!slice_height) 4194 return; 4195 4196 num_slices = intel_dp_pcon_dsc_enc_slices(intel_dp, crtc_state); 4197 if (!num_slices) 4198 return; 4199 4200 slice_width = DIV_ROUND_UP(crtc_state->hw.adjusted_mode.hdisplay, 4201 num_slices); 4202 4203 bits_per_pixel = intel_dp_pcon_dsc_enc_bpp(intel_dp, crtc_state, 4204 num_slices, slice_width); 4205 if (!bits_per_pixel) 4206 return; 4207 4208 pps_param[0] = slice_height & 0xFF; 4209 pps_param[1] = slice_height >> 8; 4210 pps_param[2] = slice_width & 0xFF; 4211 pps_param[3] = slice_width >> 8; 4212 pps_param[4] = bits_per_pixel & 0xFF; 4213 pps_param[5] = (bits_per_pixel >> 8) & 0x3; 4214 4215 ret = drm_dp_pcon_pps_override_param(&intel_dp->aux, pps_param); 4216 if (ret < 0) 4217 drm_dbg_kms(display->drm, "Failed to set pcon DSC\n"); 4218 } 4219 4220 void intel_dp_configure_protocol_converter(struct intel_dp *intel_dp, 4221 const struct intel_crtc_state *crtc_state) 4222 { 4223 struct intel_display *display = to_intel_display(intel_dp); 4224 bool ycbcr444_to_420 = false; 4225 bool rgb_to_ycbcr = false; 4226 u8 tmp; 4227 4228 if 
(intel_dp->dpcd[DP_DPCD_REV] < 0x13) 4229 return; 4230 4231 if (!drm_dp_is_branch(intel_dp->dpcd)) 4232 return; 4233 4234 tmp = intel_dp_has_hdmi_sink(intel_dp) ? DP_HDMI_DVI_OUTPUT_CONFIG : 0; 4235 4236 if (drm_dp_dpcd_writeb(&intel_dp->aux, 4237 DP_PROTOCOL_CONVERTER_CONTROL_0, tmp) != 1) 4238 drm_dbg_kms(display->drm, 4239 "Failed to %s protocol converter HDMI mode\n", 4240 str_enable_disable(intel_dp_has_hdmi_sink(intel_dp))); 4241 4242 if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR420) { 4243 switch (crtc_state->output_format) { 4244 case INTEL_OUTPUT_FORMAT_YCBCR420: 4245 break; 4246 case INTEL_OUTPUT_FORMAT_YCBCR444: 4247 ycbcr444_to_420 = true; 4248 break; 4249 case INTEL_OUTPUT_FORMAT_RGB: 4250 rgb_to_ycbcr = true; 4251 ycbcr444_to_420 = true; 4252 break; 4253 default: 4254 MISSING_CASE(crtc_state->output_format); 4255 break; 4256 } 4257 } else if (crtc_state->sink_format == INTEL_OUTPUT_FORMAT_YCBCR444) { 4258 switch (crtc_state->output_format) { 4259 case INTEL_OUTPUT_FORMAT_YCBCR444: 4260 break; 4261 case INTEL_OUTPUT_FORMAT_RGB: 4262 rgb_to_ycbcr = true; 4263 break; 4264 default: 4265 MISSING_CASE(crtc_state->output_format); 4266 break; 4267 } 4268 } 4269 4270 tmp = ycbcr444_to_420 ? DP_CONVERSION_TO_YCBCR420_ENABLE : 0; 4271 4272 if (drm_dp_dpcd_writeb(&intel_dp->aux, 4273 DP_PROTOCOL_CONVERTER_CONTROL_1, tmp) != 1) 4274 drm_dbg_kms(display->drm, 4275 "Failed to %s protocol converter YCbCr 4:2:0 conversion mode\n", 4276 str_enable_disable(intel_dp->dfp.ycbcr_444_to_420)); 4277 4278 tmp = rgb_to_ycbcr ? 
DP_CONVERSION_BT709_RGB_YCBCR_ENABLE : 0; 4279 4280 if (drm_dp_pcon_convert_rgb_to_ycbcr(&intel_dp->aux, tmp) < 0) 4281 drm_dbg_kms(display->drm, 4282 "Failed to %s protocol converter RGB->YCbCr conversion mode\n", 4283 str_enable_disable(tmp)); 4284 } 4285 4286 static bool intel_dp_get_colorimetry_status(struct intel_dp *intel_dp) 4287 { 4288 u8 dprx = 0; 4289 4290 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_DPRX_FEATURE_ENUMERATION_LIST, 4291 &dprx) != 1) 4292 return false; 4293 return dprx & DP_VSC_SDP_EXT_FOR_COLORIMETRY_SUPPORTED; 4294 } 4295 4296 static void intel_dp_read_dsc_dpcd(struct drm_dp_aux *aux, 4297 u8 dsc_dpcd[DP_DSC_RECEIVER_CAP_SIZE]) 4298 { 4299 if (drm_dp_dpcd_read(aux, DP_DSC_SUPPORT, dsc_dpcd, 4300 DP_DSC_RECEIVER_CAP_SIZE) < 0) { 4301 drm_err(aux->drm_dev, 4302 "Failed to read DPCD register 0x%x\n", 4303 DP_DSC_SUPPORT); 4304 return; 4305 } 4306 4307 drm_dbg_kms(aux->drm_dev, "DSC DPCD: %*ph\n", 4308 DP_DSC_RECEIVER_CAP_SIZE, 4309 dsc_dpcd); 4310 } 4311 4312 static void init_dsc_overall_throughput_limits(struct intel_connector *connector, bool is_branch) 4313 { 4314 u8 branch_caps[DP_DSC_BRANCH_CAP_SIZE]; 4315 int line_width; 4316 4317 connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444 = INT_MAX; 4318 connector->dp.dsc_branch_caps.overall_throughput.yuv422_420 = INT_MAX; 4319 connector->dp.dsc_branch_caps.max_line_width = INT_MAX; 4320 4321 if (!is_branch) 4322 return; 4323 4324 if (drm_dp_dpcd_read_data(connector->dp.dsc_decompression_aux, 4325 DP_DSC_BRANCH_OVERALL_THROUGHPUT_0, branch_caps, 4326 sizeof(branch_caps)) != 0) 4327 return; 4328 4329 connector->dp.dsc_branch_caps.overall_throughput.rgb_yuv444 = 4330 drm_dp_dsc_branch_max_overall_throughput(branch_caps, true) ? : INT_MAX; 4331 4332 connector->dp.dsc_branch_caps.overall_throughput.yuv422_420 = 4333 drm_dp_dsc_branch_max_overall_throughput(branch_caps, false) ? 
/*
 * intel_dp_get_dsc_sink_cap - cache the sink's DSC and FEC capabilities
 * @dpcd_rev: DPCD revision of the sink
 * @desc: device descriptor, used for DSC throughput quirk matching
 * @is_branch: whether DSC decompression is performed by a branch device
 * @connector: connector whose cached DSC/FEC state is (re)populated
 *
 * Clears the cached DSC DPCD registers, FEC capability and branch DSC
 * limits first so that sinks without DSC support never see stale values,
 * then reads them back for DP 1.4+ sinks.
 */
void intel_dp_get_dsc_sink_cap(u8 dpcd_rev,
			       const struct drm_dp_desc *desc, bool is_branch,
			       struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);

	/*
	 * Clear the cached register set to avoid using stale values
	 * for the sinks that do not support DSC.
	 */
	memset(connector->dp.dsc_dpcd, 0, sizeof(connector->dp.dsc_dpcd));

	/* Clear fec_capable to avoid using stale values */
	connector->dp.fec_capability = 0;

	memset(&connector->dp.dsc_branch_caps, 0, sizeof(connector->dp.dsc_branch_caps));
	connector->dp.dsc_throughput_quirk = false;

	/* DSC support requires DPCD 1.4+; leave everything cleared otherwise. */
	if (dpcd_rev < DP_DPCD_REV_14)
		return;

	intel_dp_read_dsc_dpcd(connector->dp.dsc_decompression_aux,
			       connector->dp.dsc_dpcd);

	if (drm_dp_dpcd_readb(connector->dp.dsc_decompression_aux, DP_FEC_CAPABILITY,
			      &connector->dp.fec_capability) < 0) {
		drm_err(display->drm, "Failed to read FEC DPCD register\n");
		return;
	}

	drm_dbg_kms(display->drm, "FEC CAPABILITY: %x\n",
		    connector->dp.fec_capability);

	/* Branch limits and quirks only matter if decompression is supported. */
	if (!(connector->dp.dsc_dpcd[0] & DP_DSC_DECOMPRESSION_IS_SUPPORTED))
		return;

	init_dsc_overall_throughput_limits(connector, is_branch);

	/*
	 * TODO: Move the HW rev check as well to the DRM core quirk table if
	 * that's required after clarifying the list of affected devices.
	 */
	if (drm_dp_has_quirk(desc, DP_DPCD_QUIRK_DSC_THROUGHPUT_BPP_LIMIT) &&
	    desc->ident.hw_rev == 0x10)
		connector->dp.dsc_throughput_quirk = true;
}
/*
 * Read and validate the eDP sink's Multi-SST Operation (MSO) link
 * capabilities, and record the resulting link count and pixel overlap in
 * intel_dp. Invalid sink configurations and missing source support both
 * degrade to mso = 0 (plain SST).
 */
static void intel_edp_mso_init(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	struct drm_display_info *info = &connector->base.display_info;
	u8 mso;

	/* MSO capability register only exists on eDP 1.4+. */
	if (intel_dp->edp_dpcd[0] < DP_EDP_14)
		return;

	if (drm_dp_dpcd_readb(&intel_dp->aux, DP_EDP_MSO_LINK_CAPABILITIES, &mso) != 1) {
		drm_err(display->drm, "Failed to read MSO cap\n");
		return;
	}

	/* Valid configurations are SST or MSO 2x1, 2x2, 4x1 */
	mso &= DP_EDP_MSO_NUMBER_OF_LINKS_MASK;
	if (mso % 2 || mso > drm_dp_max_lane_count(intel_dp->dpcd)) {
		drm_err(display->drm, "Invalid MSO link count cap %u\n", mso);
		mso = 0;
	}

	if (mso) {
		drm_dbg_kms(display->drm,
			    "Sink MSO %ux%u configuration, pixel overlap %u\n",
			    mso, drm_dp_max_lane_count(intel_dp->dpcd) / mso,
			    info->mso_pixel_overlap);
		/* Sink supports MSO, but the source must too. */
		if (!HAS_MSO(display)) {
			drm_err(display->drm,
				"No source MSO support, disabling\n");
			mso = 0;
		}
	}

	intel_dp->mso_link_count = mso;
	intel_dp->mso_pixel_overlap = mso ? info->mso_pixel_overlap : 0;
}
4554 */ 4555 if (rate > 540000 && 4556 intel_has_quirk(display, QUIRK_EDP_LIMIT_RATE_HBR2)) 4557 break; 4558 4559 intel_dp->sink_rates[i] = rate; 4560 } 4561 intel_dp->num_sink_rates = i; 4562 } 4563 4564 /* 4565 * Use DP_LINK_RATE_SET if DP_SUPPORTED_LINK_RATES are available, 4566 * default to DP_MAX_LINK_RATE and DP_LINK_BW_SET otherwise. 4567 */ 4568 if (intel_dp->num_sink_rates) 4569 intel_dp->use_rate_select = true; 4570 else 4571 intel_dp_set_sink_rates(intel_dp); 4572 4573 intel_edp_set_data_override_rates(intel_dp); 4574 } 4575 4576 static bool 4577 intel_edp_init_dpcd(struct intel_dp *intel_dp, struct intel_connector *connector) 4578 { 4579 struct intel_display *display = to_intel_display(intel_dp); 4580 4581 /* this function is meant to be called only once */ 4582 drm_WARN_ON(display->drm, intel_dp->dpcd[DP_DPCD_REV] != 0); 4583 4584 if (drm_dp_read_dpcd_caps(&intel_dp->aux, intel_dp->dpcd) != 0) 4585 return false; 4586 4587 drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc, 4588 drm_dp_is_branch(intel_dp->dpcd)); 4589 intel_init_dpcd_quirks(intel_dp, &intel_dp->desc.ident); 4590 4591 intel_dp->colorimetry_support = 4592 intel_dp_get_colorimetry_status(intel_dp); 4593 4594 /* 4595 * Read the eDP display control registers. 4596 * 4597 * Do this independent of DP_DPCD_DISPLAY_CONTROL_CAPABLE bit in 4598 * DP_EDP_CONFIGURATION_CAP, because some buggy displays do not have it 4599 * set, but require eDP 1.4+ detection (e.g. for supported link rates 4600 * method). The display control registers should read zero if they're 4601 * not supported anyway. 
4602 */ 4603 if (drm_dp_dpcd_read(&intel_dp->aux, DP_EDP_DPCD_REV, 4604 intel_dp->edp_dpcd, sizeof(intel_dp->edp_dpcd)) == 4605 sizeof(intel_dp->edp_dpcd)) { 4606 drm_dbg_kms(display->drm, "eDP DPCD: %*ph\n", 4607 (int)sizeof(intel_dp->edp_dpcd), 4608 intel_dp->edp_dpcd); 4609 4610 intel_dp->use_max_params = intel_dp->edp_dpcd[0] < DP_EDP_14; 4611 } 4612 4613 /* 4614 * If needed, program our source OUI so we can make various Intel-specific AUX services 4615 * available (such as HDR backlight controls) 4616 */ 4617 intel_dp_init_source_oui(intel_dp); 4618 4619 /* 4620 * This has to be called after intel_dp->edp_dpcd is filled, PSR checks 4621 * for SET_POWER_CAPABLE bit in intel_dp->edp_dpcd[1] 4622 */ 4623 intel_psr_init_dpcd(intel_dp, connector); 4624 4625 intel_edp_set_sink_rates(intel_dp); 4626 intel_dp_set_max_sink_lane_count(intel_dp); 4627 4628 /* Read the eDP DSC DPCD registers */ 4629 intel_dp_detect_dsc_caps(intel_dp, connector); 4630 4631 return true; 4632 } 4633 4634 static bool 4635 intel_dp_has_sink_count(struct intel_dp *intel_dp) 4636 { 4637 if (!intel_dp->attached_connector) 4638 return false; 4639 4640 return drm_dp_read_sink_count_cap(&intel_dp->attached_connector->base, 4641 intel_dp->dpcd, 4642 &intel_dp->desc); 4643 } 4644 4645 void intel_dp_update_sink_caps(struct intel_dp *intel_dp) 4646 { 4647 intel_dp_set_sink_rates(intel_dp); 4648 intel_dp_set_max_sink_lane_count(intel_dp); 4649 intel_dp_set_common_rates(intel_dp); 4650 } 4651 4652 static bool 4653 intel_dp_get_dpcd(struct intel_dp *intel_dp) 4654 { 4655 int ret; 4656 4657 if (intel_dp_init_lttpr_and_dprx_caps(intel_dp) < 0) 4658 return false; 4659 4660 /* 4661 * Don't clobber cached eDP rates. Also skip re-reading 4662 * the OUI/ID since we know it won't change. 
4663 */ 4664 if (!intel_dp_is_edp(intel_dp)) { 4665 drm_dp_read_desc(&intel_dp->aux, &intel_dp->desc, 4666 drm_dp_is_branch(intel_dp->dpcd)); 4667 4668 intel_init_dpcd_quirks(intel_dp, &intel_dp->desc.ident); 4669 4670 intel_dp->colorimetry_support = 4671 intel_dp_get_colorimetry_status(intel_dp); 4672 4673 intel_dp_update_sink_caps(intel_dp); 4674 } 4675 4676 if (intel_dp_has_sink_count(intel_dp)) { 4677 ret = drm_dp_read_sink_count(&intel_dp->aux); 4678 if (ret < 0) 4679 return false; 4680 4681 /* 4682 * Sink count can change between short pulse hpd hence 4683 * a member variable in intel_dp will track any changes 4684 * between short pulse interrupts. 4685 */ 4686 intel_dp->sink_count = ret; 4687 4688 /* 4689 * SINK_COUNT == 0 and DOWNSTREAM_PORT_PRESENT == 1 implies that 4690 * a dongle is present but no display. Unless we require to know 4691 * if a dongle is present or not, we don't need to update 4692 * downstream port information. So, an early return here saves 4693 * time from performing other operations which are not required. 
4694 */ 4695 if (!intel_dp->sink_count) 4696 return false; 4697 } 4698 4699 return drm_dp_read_downstream_info(&intel_dp->aux, intel_dp->dpcd, 4700 intel_dp->downstream_ports) == 0; 4701 } 4702 4703 static const char *intel_dp_mst_mode_str(enum drm_dp_mst_mode mst_mode) 4704 { 4705 if (mst_mode == DRM_DP_MST) 4706 return "MST"; 4707 else if (mst_mode == DRM_DP_SST_SIDEBAND_MSG) 4708 return "SST w/ sideband messaging"; 4709 else 4710 return "SST"; 4711 } 4712 4713 static enum drm_dp_mst_mode 4714 intel_dp_mst_mode_choose(struct intel_dp *intel_dp, 4715 enum drm_dp_mst_mode sink_mst_mode) 4716 { 4717 struct intel_display *display = to_intel_display(intel_dp); 4718 4719 if (!display->params.enable_dp_mst) 4720 return DRM_DP_SST; 4721 4722 if (!intel_dp_mst_source_support(intel_dp)) 4723 return DRM_DP_SST; 4724 4725 if (sink_mst_mode == DRM_DP_SST_SIDEBAND_MSG && 4726 !(intel_dp->dpcd[DP_MAIN_LINK_CHANNEL_CODING] & DP_CAP_ANSI_128B132B)) 4727 return DRM_DP_SST; 4728 4729 return sink_mst_mode; 4730 } 4731 4732 static enum drm_dp_mst_mode 4733 intel_dp_mst_detect(struct intel_dp *intel_dp) 4734 { 4735 struct intel_display *display = to_intel_display(intel_dp); 4736 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 4737 enum drm_dp_mst_mode sink_mst_mode; 4738 enum drm_dp_mst_mode mst_detect; 4739 4740 sink_mst_mode = drm_dp_read_mst_cap(&intel_dp->aux, intel_dp->dpcd); 4741 4742 mst_detect = intel_dp_mst_mode_choose(intel_dp, sink_mst_mode); 4743 4744 drm_dbg_kms(display->drm, 4745 "[ENCODER:%d:%s] MST support: port: %s, sink: %s, modparam: %s -> enable: %s\n", 4746 encoder->base.base.id, encoder->base.name, 4747 str_yes_no(intel_dp_mst_source_support(intel_dp)), 4748 intel_dp_mst_mode_str(sink_mst_mode), 4749 str_yes_no(display->params.enable_dp_mst), 4750 intel_dp_mst_mode_str(mst_detect)); 4751 4752 return mst_detect; 4753 } 4754 4755 static void 4756 intel_dp_mst_configure(struct intel_dp *intel_dp) 4757 { 4758 if 
/*
 * Read the four ESI bytes starting at DP_SINK_COUNT_ESI into @esi.
 * Returns true only if all four bytes were read successfully.
 */
static bool
intel_dp_get_sink_irq_esi(struct intel_dp *intel_dp, u8 *esi)
{
	struct intel_display *display = to_intel_display(intel_dp);

	/*
	 * Display WA for HSD #13013007775: mtl/arl/lnl
	 * Read the sink count and link service IRQ registers in separate
	 * transactions to prevent disconnecting the sink on a TBT link
	 * inadvertently.
	 */
	if (IS_DISPLAY_VER(display, 14, 20) && !display->platform.battlemage) {
		/* First transaction: the three sink count/IRQ bytes only. */
		if (drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI, esi, 3) != 3)
			return false;

		/* DP_SINK_COUNT_ESI + 3 == DP_LINK_SERVICE_IRQ_VECTOR_ESI0 */
		return drm_dp_dpcd_readb(&intel_dp->aux, DP_LINK_SERVICE_IRQ_VECTOR_ESI0,
					 &esi[3]) == 1;
	}

	/* Everywhere else a single 4-byte transaction is fine. */
	return drm_dp_dpcd_read(&intel_dp->aux, DP_SINK_COUNT_ESI, esi, 4) == 4;
}
4831 */ 4832 if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR420) 4833 return true; 4834 4835 switch (conn_state->colorspace) { 4836 case DRM_MODE_COLORIMETRY_SYCC_601: 4837 case DRM_MODE_COLORIMETRY_OPYCC_601: 4838 case DRM_MODE_COLORIMETRY_BT2020_YCC: 4839 case DRM_MODE_COLORIMETRY_BT2020_RGB: 4840 case DRM_MODE_COLORIMETRY_BT2020_CYCC: 4841 return true; 4842 default: 4843 break; 4844 } 4845 4846 return false; 4847 } 4848 4849 static ssize_t intel_dp_as_sdp_pack(const struct drm_dp_as_sdp *as_sdp, 4850 struct dp_sdp *sdp, size_t size) 4851 { 4852 size_t length = sizeof(struct dp_sdp); 4853 4854 if (size < length) 4855 return -ENOSPC; 4856 4857 memset(sdp, 0, size); 4858 4859 /* Prepare AS (Adaptive Sync) SDP Header */ 4860 sdp->sdp_header.HB0 = 0; 4861 sdp->sdp_header.HB1 = as_sdp->sdp_type; 4862 sdp->sdp_header.HB2 = 0x02; 4863 sdp->sdp_header.HB3 = as_sdp->length; 4864 4865 /* Fill AS (Adaptive Sync) SDP Payload */ 4866 sdp->db[0] = as_sdp->mode; 4867 sdp->db[1] = as_sdp->vtotal & 0xFF; 4868 sdp->db[2] = (as_sdp->vtotal >> 8) & 0xFF; 4869 sdp->db[3] = as_sdp->target_rr & 0xFF; 4870 sdp->db[4] = (as_sdp->target_rr >> 8) & 0x3; 4871 4872 if (as_sdp->target_rr_divider) 4873 sdp->db[4] |= 0x20; 4874 4875 return length; 4876 } 4877 4878 static ssize_t 4879 intel_dp_hdr_metadata_infoframe_sdp_pack(struct intel_display *display, 4880 const struct hdmi_drm_infoframe *drm_infoframe, 4881 struct dp_sdp *sdp, 4882 size_t size) 4883 { 4884 size_t length = sizeof(struct dp_sdp); 4885 const int infoframe_size = HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE; 4886 unsigned char buf[HDMI_INFOFRAME_HEADER_SIZE + HDMI_DRM_INFOFRAME_SIZE]; 4887 ssize_t len; 4888 4889 if (size < length) 4890 return -ENOSPC; 4891 4892 memset(sdp, 0, size); 4893 4894 len = hdmi_drm_infoframe_pack_only(drm_infoframe, buf, sizeof(buf)); 4895 if (len < 0) { 4896 drm_dbg_kms(display->drm, 4897 "buffer size is smaller than hdr metadata infoframe\n"); 4898 return -ENOSPC; 4899 } 4900 4901 
if (len != infoframe_size) { 4902 drm_dbg_kms(display->drm, "wrong static hdr metadata size\n"); 4903 return -ENOSPC; 4904 } 4905 4906 /* 4907 * Set up the infoframe sdp packet for HDR static metadata. 4908 * Prepare VSC Header for SU as per DP 1.4a spec, 4909 * Table 2-100 and Table 2-101 4910 */ 4911 4912 /* Secondary-Data Packet ID, 00h for non-Audio INFOFRAME */ 4913 sdp->sdp_header.HB0 = 0; 4914 /* 4915 * Packet Type 80h + Non-audio INFOFRAME Type value 4916 * HDMI_INFOFRAME_TYPE_DRM: 0x87 4917 * - 80h + Non-audio INFOFRAME Type value 4918 * - InfoFrame Type: 0x07 4919 * [CTA-861-G Table-42 Dynamic Range and Mastering InfoFrame] 4920 */ 4921 sdp->sdp_header.HB1 = drm_infoframe->type; 4922 /* 4923 * Least Significant Eight Bits of (Data Byte Count – 1) 4924 * infoframe_size - 1 4925 */ 4926 sdp->sdp_header.HB2 = 0x1D; 4927 /* INFOFRAME SDP Version Number */ 4928 sdp->sdp_header.HB3 = (0x13 << 2); 4929 /* CTA Header Byte 2 (INFOFRAME Version Number) */ 4930 sdp->db[0] = drm_infoframe->version; 4931 /* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */ 4932 sdp->db[1] = drm_infoframe->length; 4933 /* 4934 * Copy HDMI_DRM_INFOFRAME_SIZE size from a buffer after 4935 * HDMI_INFOFRAME_HEADER_SIZE 4936 */ 4937 BUILD_BUG_ON(sizeof(sdp->db) < HDMI_DRM_INFOFRAME_SIZE + 2); 4938 memcpy(&sdp->db[2], &buf[HDMI_INFOFRAME_HEADER_SIZE], 4939 HDMI_DRM_INFOFRAME_SIZE); 4940 4941 /* 4942 * Size of DP infoframe sdp packet for HDR static metadata consists of 4943 * - DP SDP Header(struct dp_sdp_header): 4 bytes 4944 * - Two Data Blocks: 2 bytes 4945 * CTA Header Byte2 (INFOFRAME Version Number) 4946 * CTA Header Byte3 (Length of INFOFRAME) 4947 * - HDMI_DRM_INFOFRAME_SIZE: 26 bytes 4948 * 4949 * Prior to GEN11's GMP register size is identical to DP HDR static metadata 4950 * infoframe size. But GEN11+ has larger than that size, write_infoframe 4951 * will pad rest of the size. 
/*
 * Pack and write a single SDP of the given @type to the hardware, provided
 * that infoframe type is enabled in @crtc_state. Unknown types trip
 * MISSING_CASE and are dropped; pack failures trigger a WARN.
 */
static void intel_write_dp_sdp(struct intel_encoder *encoder,
			       const struct intel_crtc_state *crtc_state,
			       unsigned int type)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct dp_sdp sdp = {};
	ssize_t len;

	/* Nothing to write if this infoframe type isn't enabled. */
	if ((crtc_state->infoframes.enable &
	     intel_hdmi_infoframe_enable(type)) == 0)
		return;

	switch (type) {
	case DP_SDP_VSC:
		len = drm_dp_vsc_sdp_pack(&crtc_state->infoframes.vsc, &sdp);
		break;
	case HDMI_PACKET_TYPE_GAMUT_METADATA:
		len = intel_dp_hdr_metadata_infoframe_sdp_pack(display,
							       &crtc_state->infoframes.drm.drm,
							       &sdp, sizeof(sdp));
		break;
	case DP_SDP_ADAPTIVE_SYNC:
		len = intel_dp_as_sdp_pack(&crtc_state->infoframes.as_sdp, &sdp,
					   sizeof(sdp));
		break;
	default:
		MISSING_CASE(type);
		return;
	}

	if (drm_WARN_ON(display->drm, len < 0))
		return;

	dig_port->write_infoframe(encoder, crtc_state, type, &sdp, len);
}
*/ 5010 if (!enable && HAS_DSC(display)) 5011 val &= ~VDIP_ENABLE_PPS; 5012 5013 /* 5014 * This routine disables VSC DIP if the function is called 5015 * to disable SDP or if it does not have PSR 5016 */ 5017 if (!enable || !crtc_state->has_psr) 5018 val &= ~VIDEO_DIP_ENABLE_VSC_HSW; 5019 5020 intel_de_write(display, reg, val); 5021 intel_de_posting_read(display, reg); 5022 5023 if (!enable) 5024 return; 5025 5026 intel_write_dp_sdp(encoder, crtc_state, DP_SDP_VSC); 5027 intel_write_dp_sdp(encoder, crtc_state, DP_SDP_ADAPTIVE_SYNC); 5028 5029 intel_write_dp_sdp(encoder, crtc_state, HDMI_PACKET_TYPE_GAMUT_METADATA); 5030 } 5031 5032 static 5033 int intel_dp_as_sdp_unpack(struct drm_dp_as_sdp *as_sdp, 5034 const void *buffer, size_t size) 5035 { 5036 const struct dp_sdp *sdp = buffer; 5037 5038 if (size < sizeof(struct dp_sdp)) 5039 return -EINVAL; 5040 5041 memset(as_sdp, 0, sizeof(*as_sdp)); 5042 5043 if (sdp->sdp_header.HB0 != 0) 5044 return -EINVAL; 5045 5046 if (sdp->sdp_header.HB1 != DP_SDP_ADAPTIVE_SYNC) 5047 return -EINVAL; 5048 5049 if (sdp->sdp_header.HB2 != 0x02) 5050 return -EINVAL; 5051 5052 if ((sdp->sdp_header.HB3 & 0x3F) != 9) 5053 return -EINVAL; 5054 5055 as_sdp->length = sdp->sdp_header.HB3 & DP_ADAPTIVE_SYNC_SDP_LENGTH; 5056 as_sdp->mode = sdp->db[0] & DP_ADAPTIVE_SYNC_SDP_OPERATION_MODE; 5057 as_sdp->vtotal = (sdp->db[2] << 8) | sdp->db[1]; 5058 as_sdp->target_rr = (u64)sdp->db[3] | ((u64)sdp->db[4] & 0x3); 5059 as_sdp->target_rr_divider = sdp->db[4] & 0x20 ? 
true : false; 5060 5061 return 0; 5062 } 5063 5064 static int intel_dp_vsc_sdp_unpack(struct drm_dp_vsc_sdp *vsc, 5065 const void *buffer, size_t size) 5066 { 5067 const struct dp_sdp *sdp = buffer; 5068 5069 if (size < sizeof(struct dp_sdp)) 5070 return -EINVAL; 5071 5072 memset(vsc, 0, sizeof(*vsc)); 5073 5074 if (sdp->sdp_header.HB0 != 0) 5075 return -EINVAL; 5076 5077 if (sdp->sdp_header.HB1 != DP_SDP_VSC) 5078 return -EINVAL; 5079 5080 vsc->sdp_type = sdp->sdp_header.HB1; 5081 vsc->revision = sdp->sdp_header.HB2; 5082 vsc->length = sdp->sdp_header.HB3; 5083 5084 if ((sdp->sdp_header.HB2 == 0x2 && sdp->sdp_header.HB3 == 0x8) || 5085 (sdp->sdp_header.HB2 == 0x4 && sdp->sdp_header.HB3 == 0xe) || 5086 (sdp->sdp_header.HB2 == 0x6 && sdp->sdp_header.HB3 == 0x10)) { 5087 /* 5088 * - HB2 = 0x2, HB3 = 0x8 5089 * VSC SDP supporting 3D stereo + PSR 5090 * - HB2 = 0x4, HB3 = 0xe 5091 * VSC SDP supporting 3D stereo + PSR2 with Y-coordinate of 5092 * first scan line of the SU region (applies to eDP v1.4b 5093 * and higher). 5094 * - HB2 = 0x6, HB3 = 0x10 5095 * VSC SDP supporting 3D stereo + Panel Replay. 5096 */ 5097 return 0; 5098 } else if (sdp->sdp_header.HB2 == 0x5 && sdp->sdp_header.HB3 == 0x13) { 5099 /* 5100 * - HB2 = 0x5, HB3 = 0x13 5101 * VSC SDP supporting 3D stereo + PSR2 + Pixel Encoding/Colorimetry 5102 * Format. 
5103 */ 5104 vsc->pixelformat = (sdp->db[16] >> 4) & 0xf; 5105 vsc->colorimetry = sdp->db[16] & 0xf; 5106 vsc->dynamic_range = (sdp->db[17] >> 7) & 0x1; 5107 5108 switch (sdp->db[17] & 0x7) { 5109 case 0x0: 5110 vsc->bpc = 6; 5111 break; 5112 case 0x1: 5113 vsc->bpc = 8; 5114 break; 5115 case 0x2: 5116 vsc->bpc = 10; 5117 break; 5118 case 0x3: 5119 vsc->bpc = 12; 5120 break; 5121 case 0x4: 5122 vsc->bpc = 16; 5123 break; 5124 default: 5125 MISSING_CASE(sdp->db[17] & 0x7); 5126 return -EINVAL; 5127 } 5128 5129 vsc->content_type = sdp->db[18] & 0x7; 5130 } else { 5131 return -EINVAL; 5132 } 5133 5134 return 0; 5135 } 5136 5137 static void 5138 intel_read_dp_as_sdp(struct intel_encoder *encoder, 5139 struct intel_crtc_state *crtc_state, 5140 struct drm_dp_as_sdp *as_sdp) 5141 { 5142 struct intel_display *display = to_intel_display(encoder); 5143 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5144 unsigned int type = DP_SDP_ADAPTIVE_SYNC; 5145 struct dp_sdp sdp = {}; 5146 int ret; 5147 5148 if ((crtc_state->infoframes.enable & 5149 intel_hdmi_infoframe_enable(type)) == 0) 5150 return; 5151 5152 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, 5153 sizeof(sdp)); 5154 5155 ret = intel_dp_as_sdp_unpack(as_sdp, &sdp, sizeof(sdp)); 5156 if (ret) 5157 drm_dbg_kms(display->drm, "Failed to unpack DP AS SDP\n"); 5158 } 5159 5160 static int 5161 intel_dp_hdr_metadata_infoframe_sdp_unpack(struct hdmi_drm_infoframe *drm_infoframe, 5162 const void *buffer, size_t size) 5163 { 5164 int ret; 5165 5166 const struct dp_sdp *sdp = buffer; 5167 5168 if (size < sizeof(struct dp_sdp)) 5169 return -EINVAL; 5170 5171 if (sdp->sdp_header.HB0 != 0) 5172 return -EINVAL; 5173 5174 if (sdp->sdp_header.HB1 != HDMI_INFOFRAME_TYPE_DRM) 5175 return -EINVAL; 5176 5177 /* 5178 * Least Significant Eight Bits of (Data Byte Count – 1) 5179 * 1Dh (i.e., Data Byte Count = 30 bytes). 
5180 */ 5181 if (sdp->sdp_header.HB2 != 0x1D) 5182 return -EINVAL; 5183 5184 /* Most Significant Two Bits of (Data Byte Count – 1), Clear to 00b. */ 5185 if ((sdp->sdp_header.HB3 & 0x3) != 0) 5186 return -EINVAL; 5187 5188 /* INFOFRAME SDP Version Number */ 5189 if (((sdp->sdp_header.HB3 >> 2) & 0x3f) != 0x13) 5190 return -EINVAL; 5191 5192 /* CTA Header Byte 2 (INFOFRAME Version Number) */ 5193 if (sdp->db[0] != 1) 5194 return -EINVAL; 5195 5196 /* CTA Header Byte 3 (Length of INFOFRAME): HDMI_DRM_INFOFRAME_SIZE */ 5197 if (sdp->db[1] != HDMI_DRM_INFOFRAME_SIZE) 5198 return -EINVAL; 5199 5200 ret = hdmi_drm_infoframe_unpack_only(drm_infoframe, &sdp->db[2], 5201 HDMI_DRM_INFOFRAME_SIZE); 5202 5203 return ret; 5204 } 5205 5206 static void intel_read_dp_vsc_sdp(struct intel_encoder *encoder, 5207 struct intel_crtc_state *crtc_state, 5208 struct drm_dp_vsc_sdp *vsc) 5209 { 5210 struct intel_display *display = to_intel_display(encoder); 5211 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5212 unsigned int type = DP_SDP_VSC; 5213 struct dp_sdp sdp = {}; 5214 int ret; 5215 5216 if ((crtc_state->infoframes.enable & 5217 intel_hdmi_infoframe_enable(type)) == 0) 5218 return; 5219 5220 dig_port->read_infoframe(encoder, crtc_state, type, &sdp, sizeof(sdp)); 5221 5222 ret = intel_dp_vsc_sdp_unpack(vsc, &sdp, sizeof(sdp)); 5223 5224 if (ret) 5225 drm_dbg_kms(display->drm, "Failed to unpack DP VSC SDP\n"); 5226 } 5227 5228 static void intel_read_dp_hdr_metadata_infoframe_sdp(struct intel_encoder *encoder, 5229 struct intel_crtc_state *crtc_state, 5230 struct hdmi_drm_infoframe *drm_infoframe) 5231 { 5232 struct intel_display *display = to_intel_display(encoder); 5233 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5234 unsigned int type = HDMI_PACKET_TYPE_GAMUT_METADATA; 5235 struct dp_sdp sdp = {}; 5236 int ret; 5237 5238 if ((crtc_state->infoframes.enable & 5239 intel_hdmi_infoframe_enable(type)) == 0) 5240 return; 5241 5242 
dig_port->read_infoframe(encoder, crtc_state, type, &sdp, 5243 sizeof(sdp)); 5244 5245 ret = intel_dp_hdr_metadata_infoframe_sdp_unpack(drm_infoframe, &sdp, 5246 sizeof(sdp)); 5247 5248 if (ret) 5249 drm_dbg_kms(display->drm, 5250 "Failed to unpack DP HDR Metadata Infoframe SDP\n"); 5251 } 5252 5253 void intel_read_dp_sdp(struct intel_encoder *encoder, 5254 struct intel_crtc_state *crtc_state, 5255 unsigned int type) 5256 { 5257 switch (type) { 5258 case DP_SDP_VSC: 5259 intel_read_dp_vsc_sdp(encoder, crtc_state, 5260 &crtc_state->infoframes.vsc); 5261 break; 5262 case HDMI_PACKET_TYPE_GAMUT_METADATA: 5263 intel_read_dp_hdr_metadata_infoframe_sdp(encoder, crtc_state, 5264 &crtc_state->infoframes.drm.drm); 5265 break; 5266 case DP_SDP_ADAPTIVE_SYNC: 5267 intel_read_dp_as_sdp(encoder, crtc_state, 5268 &crtc_state->infoframes.as_sdp); 5269 break; 5270 default: 5271 MISSING_CASE(type); 5272 break; 5273 } 5274 } 5275 5276 static bool intel_dp_link_ok(struct intel_dp *intel_dp, 5277 u8 link_status[DP_LINK_STATUS_SIZE]) 5278 { 5279 struct intel_display *display = to_intel_display(intel_dp); 5280 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 5281 bool uhbr = intel_dp->link_rate >= 1000000; 5282 bool ok; 5283 5284 if (uhbr) 5285 ok = drm_dp_128b132b_lane_channel_eq_done(link_status, 5286 intel_dp->lane_count); 5287 else 5288 ok = drm_dp_channel_eq_ok(link_status, intel_dp->lane_count); 5289 5290 if (ok) 5291 return true; 5292 5293 intel_dp_dump_link_status(intel_dp, DP_PHY_DPRX, link_status); 5294 drm_dbg_kms(display->drm, 5295 "[ENCODER:%d:%s] %s link not ok, retraining\n", 5296 encoder->base.base.id, encoder->base.name, 5297 uhbr ? 
"128b/132b" : "8b/10b"); 5298 5299 return false; 5300 } 5301 5302 static void 5303 intel_dp_mst_hpd_irq(struct intel_dp *intel_dp, u8 *esi, u8 *ack) 5304 { 5305 bool handled = false; 5306 5307 drm_dp_mst_hpd_irq_handle_event(&intel_dp->mst.mgr, esi, ack, &handled); 5308 5309 if (esi[1] & DP_CP_IRQ) { 5310 intel_hdcp_handle_cp_irq(intel_dp->attached_connector); 5311 ack[1] |= DP_CP_IRQ; 5312 } 5313 } 5314 5315 static bool intel_dp_mst_link_status(struct intel_dp *intel_dp) 5316 { 5317 struct intel_display *display = to_intel_display(intel_dp); 5318 struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base; 5319 u8 link_status[DP_LINK_STATUS_SIZE] = {}; 5320 const size_t esi_link_status_size = DP_LINK_STATUS_SIZE - 2; 5321 5322 if (drm_dp_dpcd_read(&intel_dp->aux, DP_LANE0_1_STATUS_ESI, link_status, 5323 esi_link_status_size) != esi_link_status_size) { 5324 drm_err(display->drm, 5325 "[ENCODER:%d:%s] Failed to read link status\n", 5326 encoder->base.base.id, encoder->base.name); 5327 return false; 5328 } 5329 5330 return intel_dp_link_ok(intel_dp, link_status); 5331 } 5332 5333 /** 5334 * intel_dp_check_mst_status - service any pending MST interrupts, check link status 5335 * @intel_dp: Intel DP struct 5336 * 5337 * Read any pending MST interrupts, call MST core to handle these and ack the 5338 * interrupts. Check if the main and AUX link state is ok. 5339 * 5340 * Returns: 5341 * - %true if pending interrupts were serviced (or no interrupts were 5342 * pending) w/o detecting an error condition. 5343 * - %false if an error condition - like AUX failure or a loss of link - is 5344 * detected, or another condition - like a DP tunnel BW state change - needs 5345 * servicing from the hotplug work. 
5346 */ 5347 static bool 5348 intel_dp_check_mst_status(struct intel_dp *intel_dp) 5349 { 5350 struct intel_display *display = to_intel_display(intel_dp); 5351 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 5352 struct intel_encoder *encoder = &dig_port->base; 5353 bool link_ok = true; 5354 bool reprobe_needed = false; 5355 5356 for (;;) { 5357 u8 esi[4] = {}; 5358 u8 ack[4] = {}; 5359 5360 if (!intel_dp_get_sink_irq_esi(intel_dp, esi)) { 5361 drm_dbg_kms(display->drm, 5362 "failed to get ESI - device may have failed\n"); 5363 link_ok = false; 5364 5365 break; 5366 } 5367 5368 drm_dbg_kms(display->drm, "DPRX ESI: %4ph\n", esi); 5369 5370 if (intel_dp_mst_active_streams(intel_dp) > 0 && link_ok && 5371 esi[3] & LINK_STATUS_CHANGED) { 5372 if (!intel_dp_mst_link_status(intel_dp)) 5373 link_ok = false; 5374 ack[3] |= LINK_STATUS_CHANGED; 5375 } 5376 5377 intel_dp_mst_hpd_irq(intel_dp, esi, ack); 5378 5379 if (esi[3] & DP_TUNNELING_IRQ) { 5380 if (drm_dp_tunnel_handle_irq(display->dp_tunnel_mgr, 5381 &intel_dp->aux)) 5382 reprobe_needed = true; 5383 ack[3] |= DP_TUNNELING_IRQ; 5384 } 5385 5386 if (mem_is_zero(ack, sizeof(ack))) 5387 break; 5388 5389 if (!intel_dp_ack_sink_irq_esi(intel_dp, ack)) 5390 drm_dbg_kms(display->drm, "Failed to ack ESI\n"); 5391 5392 if (ack[1] & (DP_DOWN_REP_MSG_RDY | DP_UP_REQ_MSG_RDY)) 5393 drm_dp_mst_hpd_irq_send_new_request(&intel_dp->mst.mgr); 5394 } 5395 5396 if (!link_ok || intel_dp->link.force_retrain) 5397 intel_encoder_link_check_queue_work(encoder, 0); 5398 5399 return !reprobe_needed; 5400 } 5401 5402 static void 5403 intel_dp_handle_hdmi_link_status_change(struct intel_dp *intel_dp) 5404 { 5405 bool is_active; 5406 u8 buf = 0; 5407 5408 is_active = drm_dp_pcon_hdmi_link_active(&intel_dp->aux); 5409 if (intel_dp->frl.is_trained && !is_active) { 5410 if (drm_dp_dpcd_readb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, &buf) < 0) 5411 return; 5412 5413 buf &= ~DP_PCON_ENABLE_HDMI_LINK; 5414 if 
(drm_dp_dpcd_writeb(&intel_dp->aux, DP_PCON_HDMI_LINK_CONFIG_1, buf) < 0) 5415 return; 5416 5417 drm_dp_pcon_hdmi_frl_link_error_count(&intel_dp->aux, &intel_dp->attached_connector->base); 5418 5419 intel_dp->frl.is_trained = false; 5420 5421 /* Restart FRL training or fall back to TMDS mode */ 5422 intel_dp_check_frl_training(intel_dp); 5423 } 5424 } 5425 5426 static bool 5427 intel_dp_needs_link_retrain(struct intel_dp *intel_dp) 5428 { 5429 u8 link_status[DP_LINK_STATUS_SIZE]; 5430 5431 if (!intel_dp->link.active) 5432 return false; 5433 5434 /* 5435 * While PSR source HW is enabled, it will control main-link sending 5436 * frames, enabling and disabling it so trying to do a retrain will fail 5437 * as the link would or not be on or it could mix training patterns 5438 * and frame data at the same time causing retrain to fail. 5439 * Also when exiting PSR, HW will retrain the link anyways fixing 5440 * any link status error. 5441 */ 5442 if (intel_psr_enabled(intel_dp)) 5443 return false; 5444 5445 if (intel_dp->link.force_retrain) 5446 return true; 5447 5448 if (drm_dp_dpcd_read_phy_link_status(&intel_dp->aux, DP_PHY_DPRX, 5449 link_status) < 0) 5450 return false; 5451 5452 /* 5453 * Validate the cached values of intel_dp->link_rate and 5454 * intel_dp->lane_count before attempting to retrain. 5455 * 5456 * FIXME would be nice to user the crtc state here, but since 5457 * we need to call this from the short HPD handler that seems 5458 * a bit hard. 
5459 */ 5460 if (!intel_dp_link_params_valid(intel_dp, intel_dp->link_rate, 5461 intel_dp->lane_count)) 5462 return false; 5463 5464 if (intel_dp->link.retrain_disabled) 5465 return false; 5466 5467 if (intel_dp->link.seq_train_failures) 5468 return true; 5469 5470 /* Retrain if link not ok */ 5471 return !intel_dp_link_ok(intel_dp, link_status) && 5472 !intel_psr_link_ok(intel_dp); 5473 } 5474 5475 bool intel_dp_has_connector(struct intel_dp *intel_dp, 5476 const struct drm_connector_state *conn_state) 5477 { 5478 struct intel_display *display = to_intel_display(intel_dp); 5479 struct intel_encoder *encoder; 5480 enum pipe pipe; 5481 5482 if (!conn_state->best_encoder) 5483 return false; 5484 5485 /* SST */ 5486 encoder = &dp_to_dig_port(intel_dp)->base; 5487 if (conn_state->best_encoder == &encoder->base) 5488 return true; 5489 5490 /* MST */ 5491 for_each_pipe(display, pipe) { 5492 encoder = &intel_dp->mst.stream_encoders[pipe]->base; 5493 if (conn_state->best_encoder == &encoder->base) 5494 return true; 5495 } 5496 5497 return false; 5498 } 5499 5500 static void wait_for_connector_hw_done(const struct drm_connector_state *conn_state) 5501 { 5502 struct intel_connector *connector = to_intel_connector(conn_state->connector); 5503 struct intel_display *display = to_intel_display(connector); 5504 5505 drm_modeset_lock_assert_held(&display->drm->mode_config.connection_mutex); 5506 5507 if (!conn_state->commit) 5508 return; 5509 5510 drm_WARN_ON(display->drm, 5511 !wait_for_completion_timeout(&conn_state->commit->hw_done, 5512 msecs_to_jiffies(5000))); 5513 } 5514 5515 int intel_dp_get_active_pipes(struct intel_dp *intel_dp, 5516 struct drm_modeset_acquire_ctx *ctx, 5517 u8 *pipe_mask) 5518 { 5519 struct intel_display *display = to_intel_display(intel_dp); 5520 struct drm_connector_list_iter conn_iter; 5521 struct intel_connector *connector; 5522 int ret = 0; 5523 5524 *pipe_mask = 0; 5525 5526 drm_connector_list_iter_begin(display->drm, &conn_iter); 5527 
for_each_intel_connector_iter(connector, &conn_iter) { 5528 struct drm_connector_state *conn_state = 5529 connector->base.state; 5530 struct intel_crtc_state *crtc_state; 5531 struct intel_crtc *crtc; 5532 5533 if (!intel_dp_has_connector(intel_dp, conn_state)) 5534 continue; 5535 5536 crtc = to_intel_crtc(conn_state->crtc); 5537 if (!crtc) 5538 continue; 5539 5540 ret = drm_modeset_lock(&crtc->base.mutex, ctx); 5541 if (ret) 5542 break; 5543 5544 crtc_state = to_intel_crtc_state(crtc->base.state); 5545 5546 drm_WARN_ON(display->drm, 5547 !intel_crtc_has_dp_encoder(crtc_state)); 5548 5549 if (!crtc_state->hw.active) 5550 continue; 5551 5552 wait_for_connector_hw_done(conn_state); 5553 5554 *pipe_mask |= BIT(crtc->pipe); 5555 } 5556 drm_connector_list_iter_end(&conn_iter); 5557 5558 return ret; 5559 } 5560 5561 void intel_dp_flush_connector_commits(struct intel_connector *connector) 5562 { 5563 wait_for_connector_hw_done(connector->base.state); 5564 } 5565 5566 static bool intel_dp_is_connected(struct intel_dp *intel_dp) 5567 { 5568 struct intel_connector *connector = intel_dp->attached_connector; 5569 5570 return connector->base.status == connector_status_connected || 5571 intel_dp->is_mst; 5572 } 5573 5574 static int intel_dp_retrain_link(struct intel_encoder *encoder, 5575 struct drm_modeset_acquire_ctx *ctx) 5576 { 5577 struct intel_display *display = to_intel_display(encoder); 5578 struct intel_dp *intel_dp = enc_to_intel_dp(encoder); 5579 u8 pipe_mask; 5580 int ret; 5581 5582 if (!intel_dp_is_connected(intel_dp)) 5583 return 0; 5584 5585 ret = drm_modeset_lock(&display->drm->mode_config.connection_mutex, 5586 ctx); 5587 if (ret) 5588 return ret; 5589 5590 if (!intel_dp_needs_link_retrain(intel_dp)) 5591 return 0; 5592 5593 ret = intel_dp_get_active_pipes(intel_dp, ctx, &pipe_mask); 5594 if (ret) 5595 return ret; 5596 5597 if (pipe_mask == 0) 5598 return 0; 5599 5600 if (!intel_dp_needs_link_retrain(intel_dp)) 5601 return 0; 5602 5603 drm_dbg_kms(display->drm, 
5604 "[ENCODER:%d:%s] retraining link (forced %s)\n", 5605 encoder->base.base.id, encoder->base.name, 5606 str_yes_no(intel_dp->link.force_retrain)); 5607 5608 ret = intel_modeset_commit_pipes(display, pipe_mask, ctx); 5609 if (ret == -EDEADLK) 5610 return ret; 5611 5612 intel_dp->link.force_retrain = false; 5613 5614 if (ret) 5615 drm_dbg_kms(display->drm, 5616 "[ENCODER:%d:%s] link retraining failed: %pe\n", 5617 encoder->base.base.id, encoder->base.name, 5618 ERR_PTR(ret)); 5619 5620 return ret; 5621 } 5622 5623 void intel_dp_link_check(struct intel_encoder *encoder) 5624 { 5625 struct drm_modeset_acquire_ctx ctx; 5626 int ret; 5627 5628 intel_modeset_lock_ctx_retry(&ctx, NULL, 0, ret) 5629 ret = intel_dp_retrain_link(encoder, &ctx); 5630 } 5631 5632 void intel_dp_check_link_state(struct intel_dp *intel_dp) 5633 { 5634 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 5635 struct intel_encoder *encoder = &dig_port->base; 5636 5637 if (!intel_dp_is_connected(intel_dp)) 5638 return; 5639 5640 if (!intel_dp_needs_link_retrain(intel_dp)) 5641 return; 5642 5643 intel_encoder_link_check_queue_work(encoder, 0); 5644 } 5645 5646 static void intel_dp_check_device_service_irq(struct intel_dp *intel_dp) 5647 { 5648 struct intel_display *display = to_intel_display(intel_dp); 5649 u8 val; 5650 5651 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 5652 return; 5653 5654 if (drm_dp_dpcd_readb(&intel_dp->aux, 5655 DP_DEVICE_SERVICE_IRQ_VECTOR, &val) != 1 || !val) 5656 return; 5657 5658 drm_dp_dpcd_writeb(&intel_dp->aux, DP_DEVICE_SERVICE_IRQ_VECTOR, val); 5659 5660 if (val & DP_AUTOMATED_TEST_REQUEST) 5661 intel_dp_test_request(intel_dp); 5662 5663 if (val & DP_CP_IRQ) 5664 intel_hdcp_handle_cp_irq(intel_dp->attached_connector); 5665 5666 if (val & DP_SINK_SPECIFIC_IRQ) 5667 drm_dbg_kms(display->drm, "Sink specific irq unhandled\n"); 5668 } 5669 5670 static bool intel_dp_check_link_service_irq(struct intel_dp *intel_dp) 5671 { 5672 struct intel_display *display = 
to_intel_display(intel_dp); 5673 bool reprobe_needed = false; 5674 u8 val; 5675 5676 if (intel_dp->dpcd[DP_DPCD_REV] < 0x11) 5677 return false; 5678 5679 if (drm_dp_dpcd_readb(&intel_dp->aux, 5680 DP_LINK_SERVICE_IRQ_VECTOR_ESI0, &val) != 1 || !val) 5681 return false; 5682 5683 if ((val & DP_TUNNELING_IRQ) && 5684 drm_dp_tunnel_handle_irq(display->dp_tunnel_mgr, 5685 &intel_dp->aux)) 5686 reprobe_needed = true; 5687 5688 if (drm_dp_dpcd_writeb(&intel_dp->aux, 5689 DP_LINK_SERVICE_IRQ_VECTOR_ESI0, val) != 1) 5690 return reprobe_needed; 5691 5692 if (val & HDMI_LINK_STATUS_CHANGED) 5693 intel_dp_handle_hdmi_link_status_change(intel_dp); 5694 5695 return reprobe_needed; 5696 } 5697 5698 /* 5699 * According to DP spec 5700 * 5.1.2: 5701 * 1. Read DPCD 5702 * 2. Configure link according to Receiver Capabilities 5703 * 3. Use Link Training from 2.5.3.3 and 3.5.1.3 5704 * 4. Check link status on receipt of hot-plug interrupt 5705 * 5706 * intel_dp_short_pulse - handles short pulse interrupts 5707 * when full detection is not required. 5708 * Returns %true if short pulse is handled and full detection 5709 * is NOT required and %false otherwise. 
5710 */ 5711 static bool 5712 intel_dp_short_pulse(struct intel_dp *intel_dp) 5713 { 5714 u8 old_sink_count = intel_dp->sink_count; 5715 bool reprobe_needed = false; 5716 bool ret; 5717 5718 intel_dp_test_reset(intel_dp); 5719 5720 /* 5721 * Now read the DPCD to see if it's actually running 5722 * If the current value of sink count doesn't match with 5723 * the value that was stored earlier or dpcd read failed 5724 * we need to do full detection 5725 */ 5726 ret = intel_dp_get_dpcd(intel_dp); 5727 5728 if ((old_sink_count != intel_dp->sink_count) || !ret) { 5729 /* No need to proceed if we are going to do full detect */ 5730 return false; 5731 } 5732 5733 intel_dp_check_device_service_irq(intel_dp); 5734 reprobe_needed = intel_dp_check_link_service_irq(intel_dp); 5735 5736 /* Handle CEC interrupts, if any */ 5737 drm_dp_cec_irq(&intel_dp->aux); 5738 5739 intel_dp_check_link_state(intel_dp); 5740 5741 intel_psr_short_pulse(intel_dp); 5742 5743 if (intel_alpm_get_error(intel_dp)) { 5744 intel_alpm_disable(intel_dp); 5745 intel_dp->alpm.sink_alpm_error = true; 5746 } 5747 5748 if (intel_dp_test_short_pulse(intel_dp)) 5749 reprobe_needed = true; 5750 5751 return !reprobe_needed; 5752 } 5753 5754 /* XXX this is probably wrong for multiple downstream ports */ 5755 static enum drm_connector_status 5756 intel_dp_detect_dpcd(struct intel_dp *intel_dp) 5757 { 5758 struct intel_display *display = to_intel_display(intel_dp); 5759 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 5760 u8 *dpcd = intel_dp->dpcd; 5761 u8 type; 5762 5763 if (drm_WARN_ON(display->drm, intel_dp_is_edp(intel_dp))) 5764 return connector_status_connected; 5765 5766 intel_lspcon_resume(dig_port); 5767 5768 if (!intel_dp_get_dpcd(intel_dp)) 5769 return connector_status_disconnected; 5770 5771 intel_dp->mst_detect = intel_dp_mst_detect(intel_dp); 5772 5773 /* if there's no downstream port, we're done */ 5774 if (!drm_dp_is_branch(dpcd)) 5775 return connector_status_connected; 5776 5777 /* If 
we're HPD-aware, SINK_COUNT changes dynamically */ 5778 if (intel_dp_has_sink_count(intel_dp) && 5779 intel_dp->downstream_ports[0] & DP_DS_PORT_HPD) { 5780 return intel_dp->sink_count ? 5781 connector_status_connected : connector_status_disconnected; 5782 } 5783 5784 if (intel_dp->mst_detect == DRM_DP_MST) 5785 return connector_status_connected; 5786 5787 /* If no HPD, poke DDC gently */ 5788 if (drm_probe_ddc(&intel_dp->aux.ddc)) 5789 return connector_status_connected; 5790 5791 /* Well we tried, say unknown for unreliable port types */ 5792 if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11) { 5793 type = intel_dp->downstream_ports[0] & DP_DS_PORT_TYPE_MASK; 5794 if (type == DP_DS_PORT_TYPE_VGA || 5795 type == DP_DS_PORT_TYPE_NON_EDID) 5796 return connector_status_unknown; 5797 } else { 5798 type = intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] & 5799 DP_DWN_STRM_PORT_TYPE_MASK; 5800 if (type == DP_DWN_STRM_PORT_TYPE_ANALOG || 5801 type == DP_DWN_STRM_PORT_TYPE_OTHER) 5802 return connector_status_unknown; 5803 } 5804 5805 /* Anything else is out of spec, warn and ignore */ 5806 drm_dbg_kms(display->drm, "Broken DP branch device, ignoring\n"); 5807 return connector_status_disconnected; 5808 } 5809 5810 static enum drm_connector_status 5811 edp_detect(struct intel_dp *intel_dp) 5812 { 5813 return connector_status_connected; 5814 } 5815 5816 void intel_digital_port_lock(struct intel_encoder *encoder) 5817 { 5818 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5819 5820 if (dig_port->lock) 5821 dig_port->lock(dig_port); 5822 } 5823 5824 void intel_digital_port_unlock(struct intel_encoder *encoder) 5825 { 5826 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5827 5828 if (dig_port->unlock) 5829 dig_port->unlock(dig_port); 5830 } 5831 5832 /* 5833 * intel_digital_port_connected_locked - is the specified port connected? 
5834 * @encoder: intel_encoder 5835 * 5836 * In cases where there's a connector physically connected but it can't be used 5837 * by our hardware we also return false, since the rest of the driver should 5838 * pretty much treat the port as disconnected. This is relevant for type-C 5839 * (starting on ICL) where there's ownership involved. 5840 * 5841 * The caller must hold the lock acquired by calling intel_digital_port_lock() 5842 * when calling this function. 5843 * 5844 * Return %true if port is connected, %false otherwise. 5845 */ 5846 bool intel_digital_port_connected_locked(struct intel_encoder *encoder) 5847 { 5848 struct intel_display *display = to_intel_display(encoder); 5849 struct intel_digital_port *dig_port = enc_to_dig_port(encoder); 5850 bool is_glitch_free = intel_tc_port_handles_hpd_glitches(dig_port); 5851 bool is_connected = false; 5852 5853 with_intel_display_power(display, POWER_DOMAIN_DISPLAY_CORE) { 5854 poll_timeout_us(is_connected = dig_port->connected(encoder), 5855 is_connected || is_glitch_free, 5856 30, 4000, false); 5857 } 5858 5859 return is_connected; 5860 } 5861 5862 bool intel_digital_port_connected(struct intel_encoder *encoder) 5863 { 5864 bool ret; 5865 5866 intel_digital_port_lock(encoder); 5867 ret = intel_digital_port_connected_locked(encoder); 5868 intel_digital_port_unlock(encoder); 5869 5870 return ret; 5871 } 5872 5873 static const struct drm_edid * 5874 intel_dp_get_edid(struct intel_dp *intel_dp) 5875 { 5876 struct intel_connector *connector = intel_dp->attached_connector; 5877 const struct drm_edid *fixed_edid = connector->panel.fixed_edid; 5878 5879 /* Use panel fixed edid if we have one */ 5880 if (fixed_edid) { 5881 /* invalid edid */ 5882 if (IS_ERR(fixed_edid)) 5883 return NULL; 5884 5885 return drm_edid_dup(fixed_edid); 5886 } 5887 5888 return drm_edid_read_ddc(&connector->base, &intel_dp->aux.ddc); 5889 } 5890 5891 static void 5892 intel_dp_update_dfp(struct intel_dp *intel_dp, 5893 const struct drm_edid 
*drm_edid) 5894 { 5895 struct intel_display *display = to_intel_display(intel_dp); 5896 struct intel_connector *connector = intel_dp->attached_connector; 5897 5898 intel_dp->dfp.max_bpc = 5899 drm_dp_downstream_max_bpc(intel_dp->dpcd, 5900 intel_dp->downstream_ports, drm_edid); 5901 5902 intel_dp->dfp.max_dotclock = 5903 drm_dp_downstream_max_dotclock(intel_dp->dpcd, 5904 intel_dp->downstream_ports); 5905 5906 intel_dp->dfp.min_tmds_clock = 5907 drm_dp_downstream_min_tmds_clock(intel_dp->dpcd, 5908 intel_dp->downstream_ports, 5909 drm_edid); 5910 intel_dp->dfp.max_tmds_clock = 5911 drm_dp_downstream_max_tmds_clock(intel_dp->dpcd, 5912 intel_dp->downstream_ports, 5913 drm_edid); 5914 5915 intel_dp->dfp.pcon_max_frl_bw = 5916 drm_dp_get_pcon_max_frl_bw(intel_dp->dpcd, 5917 intel_dp->downstream_ports); 5918 5919 drm_dbg_kms(display->drm, 5920 "[CONNECTOR:%d:%s] DFP max bpc %d, max dotclock %d, TMDS clock %d-%d, PCON Max FRL BW %dGbps\n", 5921 connector->base.base.id, connector->base.name, 5922 intel_dp->dfp.max_bpc, 5923 intel_dp->dfp.max_dotclock, 5924 intel_dp->dfp.min_tmds_clock, 5925 intel_dp->dfp.max_tmds_clock, 5926 intel_dp->dfp.pcon_max_frl_bw); 5927 5928 intel_dp_get_pcon_dsc_cap(intel_dp); 5929 } 5930 5931 static bool 5932 intel_dp_can_ycbcr420(struct intel_dp *intel_dp) 5933 { 5934 if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420) && 5935 (!drm_dp_is_branch(intel_dp->dpcd) || intel_dp->dfp.ycbcr420_passthrough)) 5936 return true; 5937 5938 if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_RGB) && 5939 dfp_can_convert_from_rgb(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420)) 5940 return true; 5941 5942 if (source_can_output(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR444) && 5943 dfp_can_convert_from_ycbcr444(intel_dp, INTEL_OUTPUT_FORMAT_YCBCR420)) 5944 return true; 5945 5946 return false; 5947 } 5948 5949 static void 5950 intel_dp_update_420(struct intel_dp *intel_dp) 5951 { 5952 struct intel_display *display = to_intel_display(intel_dp); 5953 struct 
intel_connector *connector = intel_dp->attached_connector; 5954 5955 intel_dp->dfp.ycbcr420_passthrough = 5956 drm_dp_downstream_420_passthrough(intel_dp->dpcd, 5957 intel_dp->downstream_ports); 5958 /* on-board LSPCON always assumed to support 4:4:4->4:2:0 conversion */ 5959 intel_dp->dfp.ycbcr_444_to_420 = 5960 intel_lspcon_active(dp_to_dig_port(intel_dp)) || 5961 drm_dp_downstream_444_to_420_conversion(intel_dp->dpcd, 5962 intel_dp->downstream_ports); 5963 intel_dp->dfp.rgb_to_ycbcr = 5964 drm_dp_downstream_rgb_to_ycbcr_conversion(intel_dp->dpcd, 5965 intel_dp->downstream_ports, 5966 DP_DS_HDMI_BT709_RGB_YCBCR_CONV); 5967 5968 connector->base.ycbcr_420_allowed = intel_dp_can_ycbcr420(intel_dp); 5969 5970 drm_dbg_kms(display->drm, 5971 "[CONNECTOR:%d:%s] RGB->YcbCr conversion? %s, YCbCr 4:2:0 allowed? %s, YCbCr 4:4:4->4:2:0 conversion? %s\n", 5972 connector->base.base.id, connector->base.name, 5973 str_yes_no(intel_dp->dfp.rgb_to_ycbcr), 5974 str_yes_no(connector->base.ycbcr_420_allowed), 5975 str_yes_no(intel_dp->dfp.ycbcr_444_to_420)); 5976 } 5977 5978 static void 5979 intel_dp_set_edid(struct intel_dp *intel_dp) 5980 { 5981 struct intel_display *display = to_intel_display(intel_dp); 5982 struct intel_connector *connector = intel_dp->attached_connector; 5983 const struct drm_edid *drm_edid; 5984 bool vrr_capable; 5985 5986 intel_dp_unset_edid(intel_dp); 5987 drm_edid = intel_dp_get_edid(intel_dp); 5988 connector->detect_edid = drm_edid; 5989 5990 /* Below we depend on display info having been updated */ 5991 drm_edid_connector_update(&connector->base, drm_edid); 5992 5993 vrr_capable = intel_vrr_is_capable(connector); 5994 drm_dbg_kms(display->drm, "[CONNECTOR:%d:%s] VRR capable: %s\n", 5995 connector->base.base.id, connector->base.name, str_yes_no(vrr_capable)); 5996 drm_connector_set_vrr_capable_property(&connector->base, vrr_capable); 5997 5998 intel_dp_update_dfp(intel_dp, drm_edid); 5999 intel_dp_update_420(intel_dp); 6000 6001 
	drm_dp_cec_attach(&intel_dp->aux,
			  connector->base.display_info.source_physical_address);
}

/*
 * Drop the cached EDID and reset all sink/DFP capabilities that were
 * derived from it, returning the connector to a "nothing attached" state.
 */
static void
intel_dp_unset_edid(struct intel_dp *intel_dp)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	drm_dp_cec_unset_edid(&intel_dp->aux);
	drm_edid_free(connector->detect_edid);
	connector->detect_edid = NULL;

	intel_dp->dfp.max_bpc = 0;
	intel_dp->dfp.max_dotclock = 0;
	intel_dp->dfp.min_tmds_clock = 0;
	intel_dp->dfp.max_tmds_clock = 0;

	intel_dp->dfp.pcon_max_frl_bw = 0;

	intel_dp->dfp.ycbcr_444_to_420 = false;
	connector->base.ycbcr_420_allowed = false;

	drm_connector_set_vrr_capable_property(&connector->base,
					       false);
}

/*
 * Cache whether the sink supports the Adaptive-Sync SDP; requires both
 * platform support (HAS_AS_SDP) and sink DPCD support.
 */
static void
intel_dp_detect_sdp_caps(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	intel_dp->as_sdp_supported = HAS_AS_SDP(display) &&
		drm_dp_as_sdp_supported(&intel_dp->aux, intel_dp->dpcd);
}

/*
 * Decide whether DPCD accesses need to be preceded by a probe access.
 * Never needed on eDP; always on for an external sink when the caller
 * forces it; otherwise only for quirky (non-MST) sinks.
 */
static bool intel_dp_needs_dpcd_probe(struct intel_dp *intel_dp, bool force_on_external)
{
	struct intel_connector *connector = intel_dp->attached_connector;

	if (intel_dp_is_edp(intel_dp))
		return false;

	if (force_on_external)
		return true;

	if (intel_dp->is_mst)
		return false;

	return drm_edid_has_quirk(&connector->base, DRM_EDID_QUIRK_DP_DPCD_PROBE);
}

/* Apply the DPCD-probe policy computed above to the AUX channel. */
void intel_dp_dpcd_set_probe(struct intel_dp *intel_dp, bool force_on_external)
{
	drm_dp_dpcd_set_probe(&intel_dp->aux,
			      intel_dp_needs_dpcd_probe(intel_dp, force_on_external));
}

/*
 * Connector ->detect_ctx() hook: determine connection status, refresh the
 * cached DPCD/EDID derived state, and handle MST/tunnel state transitions.
 * Called with the mode_config connection_mutex held (asserted below).
 */
static int
intel_dp_detect(struct drm_connector *_connector,
		struct drm_modeset_acquire_ctx *ctx,
		bool force)
{
	struct intel_display *display = to_intel_display(_connector->dev);
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_encoder *encoder = &dig_port->base;
	enum drm_connector_status status;
	int ret;

	drm_dbg_kms(display->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.base.id, connector->base.name);
	drm_WARN_ON(display->drm,
		    !drm_modeset_is_locked(&display->drm->mode_config.connection_mutex));

	if (!intel_display_device_enabled(display))
		return connector_status_disconnected;

	/* Without HW access report the last known status unchanged. */
	if (!intel_display_driver_check_access(display))
		return connector->base.status;

	intel_dp_flush_connector_commits(connector);

	intel_pps_vdd_on(intel_dp);

	/* Can't disconnect eDP */
	if (intel_dp_is_edp(intel_dp))
		status = edp_detect(intel_dp);
	else if (intel_digital_port_connected(encoder))
		status = intel_dp_detect_dpcd(intel_dp);
	else
		status = connector_status_disconnected;

	if (status != connector_status_disconnected &&
	    !intel_dp_mst_verify_dpcd_state(intel_dp))
		/*
		 * This requires retrying detection for instance to re-enable
		 * the MST mode that got reset via a long HPD pulse. The retry
		 * will happen either via the hotplug handler's retry logic,
		 * ensured by setting the connector here to SST/disconnected,
		 * or via a userspace connector probing in response to the
		 * hotplug uevent sent when removing the MST connectors.
		 */
		status = connector_status_disconnected;

	if (status == connector_status_disconnected) {
		intel_dp_test_reset(intel_dp);
		/*
		 * FIXME: Resetting these caps here cause
		 * state computation fail if the connector need to be
		 * modeset after sink disconnect. Move resetting them
		 * to where new sink is connected.
		 */
		memset(connector->dp.dsc_dpcd, 0, sizeof(connector->dp.dsc_dpcd));
		memset(connector->dp.panel_replay_caps.dpcd, 0,
		       sizeof(connector->dp.panel_replay_caps.dpcd));
		intel_dp->psr.sink_panel_replay_support = false;
		connector->dp.panel_replay_caps.support = false;
		connector->dp.panel_replay_caps.su_support = false;
		connector->dp.panel_replay_caps.dsc_support =
			INTEL_DP_PANEL_REPLAY_DSC_NOT_SUPPORTED;

		intel_dp_mst_disconnect(intel_dp);

		intel_dp_tunnel_disconnect(intel_dp);

		goto out_unset_edid;
	}

	intel_dp_init_source_oui(intel_dp);

	ret = intel_dp_tunnel_detect(intel_dp, ctx);
	if (ret == -EDEADLK) {
		/*
		 * Propagate -EDEADLK via the status return so the probe
		 * helper can back off and retry with the acquire ctx.
		 */
		status = ret;

		goto out_vdd_off;
	}

	/* A newly detected tunnel counts as a connector change. */
	if (ret == 1)
		connector->base.epoch_counter++;

	if (!intel_dp_is_edp(intel_dp))
		intel_psr_init_dpcd(intel_dp, connector);

	intel_dp_detect_dsc_caps(intel_dp, connector);

	intel_dp_detect_sdp_caps(intel_dp);

	if (intel_dp->reset_link_params) {
		intel_dp_reset_link_params(intel_dp);
		intel_dp->reset_link_params = false;
	}

	intel_dp_mst_configure(intel_dp);

	intel_dp_print_rates(intel_dp);

	if (intel_dp->is_mst) {
		/*
		 * If we are in MST mode then this connector
		 * won't appear connected or have anything
		 * with EDID on it
		 */
		status = connector_status_disconnected;
		goto out_unset_edid;
	}

	/*
	 * Some external monitors do not signal loss of link synchronization
	 * with an IRQ_HPD, so force a link status check.
	 *
	 * TODO: this probably became redundant, so remove it: the link state
	 * is rechecked/recovered now after modesets, where the loss of
	 * synchronization tends to occur.
	 */
	if (!intel_dp_is_edp(intel_dp))
		intel_dp_check_link_state(intel_dp);

	/*
	 * Clearing NACK and defer counts to get their exact values
	 * while reading EDID which are required by Compliance tests
	 * 4.2.2.4 and 4.2.2.5
	 */
	intel_dp->aux.i2c_nack_count = 0;
	intel_dp->aux.i2c_defer_count = 0;

	intel_dp_set_edid(intel_dp);
	if (intel_dp_is_edp(intel_dp) || connector->detect_edid)
		status = connector_status_connected;

	intel_dp_check_device_service_irq(intel_dp);

out_unset_edid:
	if (status != connector_status_connected && !intel_dp->is_mst)
		intel_dp_unset_edid(intel_dp);

	intel_dp_dpcd_set_probe(intel_dp, false);

	if (!intel_dp_is_edp(intel_dp))
		drm_dp_set_subconnector_property(&connector->base,
						 status,
						 intel_dp->dpcd,
						 intel_dp->downstream_ports);
out_vdd_off:
	intel_pps_vdd_off(intel_dp);

	return status;
}

/*
 * Connector ->force() hook: re-read the EDID for a connector whose status
 * userspace has forced, without doing a full detect cycle.
 */
static void
intel_dp_force(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	drm_dbg_kms(display->drm, "[CONNECTOR:%d:%s]\n",
		    connector->base.base.id, connector->base.name);

	if (!intel_display_driver_check_access(display))
		return;

	intel_dp_unset_edid(intel_dp);

	if (connector->base.status != connector_status_connected)
		return;

	intel_dp_set_edid(intel_dp);

	intel_dp_dpcd_set_probe(intel_dp, false);
}

/*
 * Connector ->get_modes() hook: add modes from the cached EDID, the eDP
 * fixed mode, or as a last resort a mode reported by a DP downstream port.
 */
static int intel_dp_get_modes(struct drm_connector *_connector)
{
	struct intel_display *display = to_intel_display(_connector->dev);
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int num_modes;

	/* drm_edid_connector_update() done in ->detect() or ->force() */
	num_modes = drm_edid_connector_add_modes(&connector->base);

	/* Also add fixed mode, which may or may not be present in EDID */
	if (intel_dp_is_edp(intel_dp))
		num_modes += intel_panel_get_modes(connector);

	if (num_modes)
		return num_modes;

	if (!connector->detect_edid) {
		struct drm_display_mode *mode;

		mode = drm_dp_downstream_mode(display->drm,
					      intel_dp->dpcd,
					      intel_dp->downstream_ports);
		if (mode) {
			drm_mode_probed_add(&connector->base, mode);
			num_modes++;
		}
	}

	return num_modes;
}

/*
 * Connector ->late_register() hook: register the AUX/CEC devices and, for
 * LSPCON adapters, initialize LSPCON and its HDR metadata property.
 */
static int
intel_dp_connector_register(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	int ret;

	ret = intel_connector_register(&connector->base);
	if (ret)
		return ret;

	drm_dbg_kms(display->drm, "registering %s bus for %s\n",
		    intel_dp->aux.name, connector->base.kdev->kobj.name);

	intel_dp->aux.dev = connector->base.kdev;
	ret = drm_dp_aux_register(&intel_dp->aux);
	if (!ret)
		drm_dp_cec_register_connector(&intel_dp->aux, &connector->base);

	if (!intel_bios_encoder_is_lspcon(dig_port->base.devdata))
		return ret;

	/*
	 * ToDo: Clean this up to handle lspcon init and resume more
	 * efficiently and streamlined.
	 */
	if (intel_lspcon_init(dig_port)) {
		if (intel_lspcon_detect_hdr_capability(dig_port))
			drm_connector_attach_hdr_output_metadata_property(&connector->base);
	}

	return ret;
}

/* Connector ->early_unregister() hook: tear down in reverse order. */
static void
intel_dp_connector_unregister(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	drm_dp_cec_unregister_connector(&intel_dp->aux);
	drm_dp_aux_unregister(&intel_dp->aux);
	intel_connector_unregister(&connector->base);
}

/*
 * Sync the connector's DSC decompression tracking with the given CRTC
 * state during HW state readout/sanitization.
 */
void intel_dp_connector_sync_state(struct intel_connector *connector,
				   const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(connector);

	if (crtc_state && crtc_state->dsc.compression_enable) {
		drm_WARN_ON(display->drm,
			    !connector->dp.dsc_decompression_aux);
		connector->dp.dsc_decompression_enabled = true;
	} else {
		connector->dp.dsc_decompression_enabled = false;
	}
}

/*
 * Flush pending work and release encoder resources on driver removal.
 */
void intel_dp_encoder_flush_work(struct drm_encoder *_encoder)
{
	struct intel_encoder *encoder = to_intel_encoder(_encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &dig_port->dp;

	intel_encoder_link_check_flush_work(encoder);

	intel_dp_mst_encoder_cleanup(dig_port);

	intel_dp_tunnel_destroy(intel_dp);

	intel_pps_vdd_off_sync(intel_dp);

	/*
	 * Ensure power off delay is respected on module remove, so that we can
	 * reduce delays at driver probe. See pps_init_timestamps().
	 */
	intel_pps_wait_power_cycle(intel_dp);

	intel_dp_aux_fini(intel_dp);
}

/* Turn VDD off and suspend any DP tunnel state across system suspend. */
void intel_dp_encoder_suspend(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_pps_vdd_off_sync(intel_dp);

	intel_dp_tunnel_suspend(intel_dp);
}

/* Honor the panel power cycle delay on shutdown. */
void intel_dp_encoder_shutdown(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_pps_wait_power_cycle(intel_dp);
}

/*
 * Add all connectors belonging to the given tile group (and their CRTCs'
 * planes) to the atomic state, flagging the CRTCs for a modeset.
 */
static int intel_modeset_tile_group(struct intel_atomic_state *state,
				    int tile_group_id)
{
	struct intel_display *display = to_intel_display(state);
	struct drm_connector_list_iter conn_iter;
	struct intel_connector *connector;
	int ret = 0;

	drm_connector_list_iter_begin(display->drm, &conn_iter);
	for_each_intel_connector_iter(connector, &conn_iter) {
		struct drm_connector_state *conn_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (!connector->base.has_tile ||
		    connector->base.tile_group->id != tile_group_id)
			continue;

		conn_state = drm_atomic_get_connector_state(&state->base,
							    &connector->base);
		if (IS_ERR(conn_state)) {
			ret = PTR_ERR(conn_state);
			break;
		}

		crtc = to_intel_crtc(conn_state->crtc);

		if (!crtc)
			continue;

		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
	}
	drm_connector_list_iter_end(&conn_iter);

	return ret;
}

/*
 * Flag every enabled CRTC whose transcoder is in the given mask for a
 * modeset, pulling its connectors and planes into the atomic state.
 * Warns if any requested transcoder was not found enabled.
 */
static int intel_modeset_affected_transcoders(struct intel_atomic_state *state, u8 transcoders)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc *crtc;

	if (transcoders == 0)
		return 0;

	for_each_intel_crtc(display->drm, crtc) {
		struct intel_crtc_state *crtc_state;
		int ret;

		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state))
			return PTR_ERR(crtc_state);

		if (!crtc_state->hw.enable)
			continue;

		if (!(transcoders & BIT(crtc_state->cpu_transcoder)))
			continue;

		crtc_state->uapi.mode_changed = true;

		ret = drm_atomic_add_affected_connectors(&state->base, &crtc->base);
		if (ret)
			return ret;

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			return ret;

		transcoders &= ~BIT(crtc_state->cpu_transcoder);
	}

	drm_WARN_ON(display->drm, transcoders != 0);

	return 0;
}

/*
 * If the connector's old CRTC was part of a port sync group, pull all
 * synced (master + slave) transcoders into the state for a modeset.
 */
static int intel_modeset_synced_crtcs(struct intel_atomic_state *state,
				      struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	const struct drm_connector_state *old_conn_state =
		drm_atomic_get_old_connector_state(&state->base, &connector->base);
	const struct intel_crtc_state *old_crtc_state;
	struct intel_crtc *crtc;
	u8 transcoders;

	crtc = to_intel_crtc(old_conn_state->crtc);
	if (!crtc)
		return 0;

	old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc);

	if (!old_crtc_state->hw.active)
		return 0;

	transcoders = old_crtc_state->sync_mode_slaves_mask;
	if (old_crtc_state->master_transcoder != INVALID_TRANSCODER)
		transcoders |= BIT(old_crtc_state->master_transcoder);

	return intel_modeset_affected_transcoders(state,
						  transcoders);
}

/*
 * Connector ->atomic_check() hook: digital connector checks, MST root
 * connector bandwidth check, tunnel BW check, and tile/port-sync modeset
 * propagation when this connector needs a modeset.
 */
static int intel_dp_connector_atomic_check(struct drm_connector *_connector,
					   struct drm_atomic_state *_state)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct drm_connector_state *conn_state =
		drm_atomic_get_new_connector_state(_state, &connector->base);
	struct intel_dp *intel_dp = enc_to_intel_dp(connector->encoder);
	int ret;

	ret = intel_digital_connector_atomic_check(&connector->base, &state->base);
	if (ret)
		return ret;

	if (intel_dp_mst_source_support(intel_dp)) {
		ret = drm_dp_mst_root_conn_atomic_check(conn_state, &intel_dp->mst.mgr);
		if (ret)
			return ret;
	}

	if (!intel_connector_needs_modeset(state, &connector->base))
		return 0;

	ret = intel_dp_tunnel_atomic_check_state(state,
						 intel_dp,
						 connector);
	if (ret)
		return ret;

	/*
	 * We don't enable port sync on BDW due to missing w/as and
	 * due to not having adjusted the modeset sequence appropriately.
	 */
	if (DISPLAY_VER(display) < 9)
		return 0;

	if (connector->base.has_tile) {
		ret = intel_modeset_tile_group(state, connector->base.tile_group->id);
		if (ret)
			return ret;
	}

	return intel_modeset_synced_crtcs(state, &connector->base);
}

/*
 * Connector ->oob_hotplug_event() hook: record an out-of-band (e.g. USB
 * Type-C) hotplug notification and kick the detection work if the state
 * changed. Runs under the display IRQ spinlock.
 */
static void intel_dp_oob_hotplug_event(struct drm_connector *_connector,
				       enum drm_connector_status hpd_state)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_encoder *encoder = intel_attached_encoder(connector);
	bool hpd_high = hpd_state == connector_status_connected;
	unsigned int hpd_pin = encoder->hpd_pin;
	bool need_work = false;

	spin_lock_irq(&display->irq.lock);
	if (hpd_high != test_bit(hpd_pin, &display->hotplug.oob_hotplug_last_state)) {
		display->hotplug.event_bits |= BIT(hpd_pin);

		__assign_bit(hpd_pin,
			     &display->hotplug.oob_hotplug_last_state,
			     hpd_high);
		need_work = true;
	}
	spin_unlock_irq(&display->irq.lock);

	if (need_work)
		intel_hpd_schedule_detection(display);
}

static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.force = intel_dp_force,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = intel_dp_connector_register,
	.early_unregister = intel_dp_connector_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
	.oob_hotplug_event = intel_dp_oob_hotplug_event,
};

static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.detect_ctx = intel_dp_detect,
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.atomic_check = intel_dp_connector_atomic_check,
};

/*
 * Handle a long/short HPD pulse on a DP port. Returns IRQ_HANDLED when
 * the pulse was consumed, IRQ_NONE when full hotplug processing should
 * follow (long pulse, or a short pulse that needs reprobing).
 */
enum irqreturn
intel_dp_hpd_pulse(struct intel_digital_port *dig_port, bool long_hpd)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_dp *intel_dp = &dig_port->dp;
	u8 dpcd[DP_RECEIVER_CAP_SIZE];

	if (dig_port->base.type == INTEL_OUTPUT_EDP &&
	    (long_hpd ||
	     intel_display_rpm_suspended(display) ||
	     !intel_pps_have_panel_power_or_vdd(intel_dp))) {
		/*
		 * vdd off can generate a long/short pulse on eDP which
		 * would require vdd on to handle it, and thus we
		 * would end up in an endless cycle of
		 * "vdd off -> long/short hpd -> vdd on -> detect -> vdd off -> ..."
		 */
		drm_dbg_kms(display->drm,
			    "ignoring %s hpd on eDP [ENCODER:%d:%s]\n",
			    long_hpd ? "long" : "short",
			    dig_port->base.base.base.id,
			    dig_port->base.base.name);
		return IRQ_HANDLED;
	}

	drm_dbg_kms(display->drm, "got hpd irq on [ENCODER:%d:%s] - %s\n",
		    dig_port->base.base.base.id,
		    dig_port->base.base.name,
		    long_hpd ? "long" : "short");

	/*
	 * TBT DP tunnels require the GFX driver to read out the DPRX caps in
	 * response to long HPD pulses. The DP hotplug handler does that,
	 * however the hotplug handler may be blocked by another
	 * connector's/encoder's hotplug handler. Since the TBT CM may not
	 * complete the DP tunnel BW request for the latter connector/encoder
	 * waiting for this encoder's DPRX read, perform a dummy read here.
	 */
	if (long_hpd) {
		intel_dp_dpcd_set_probe(intel_dp, true);

		intel_dp_read_dprx_caps(intel_dp, dpcd);

		intel_dp->reset_link_params = true;
		intel_dp_invalidate_source_oui(intel_dp);

		return IRQ_NONE;
	}

	if (intel_dp->is_mst) {
		if (!intel_dp_check_mst_status(intel_dp))
			return IRQ_NONE;
	} else if (!intel_dp_short_pulse(intel_dp)) {
		return IRQ_NONE;
	}

	return IRQ_HANDLED;
}

/*
 * Determine from platform and VBT data whether the given port drives an
 * eDP panel.
 */
static bool _intel_dp_is_port_edp(struct intel_display *display,
				  const struct intel_bios_encoder_data *devdata,
				  enum port port)
{
	/*
	 * eDP not supported on g4x. so bail out early just
	 * for a bit extra safety in case the VBT is bonkers.
	 */
	if (DISPLAY_VER(display) < 5)
		return false;

	/* Port A is hardwired to eDP on pre-SKL platforms. */
	if (DISPLAY_VER(display) < 9 && port == PORT_A)
		return true;

	return devdata && intel_bios_encoder_supports_edp(devdata);
}

/* Public wrapper that looks up the port's VBT data itself. */
bool intel_dp_is_port_edp(struct intel_display *display, enum port port)
{
	const struct intel_bios_encoder_data *devdata =
		intel_bios_encoder_data_lookup(display, port);

	return _intel_dp_is_port_edp(display, devdata, port);
}

/*
 * Whether the encoder's hardware can emit the gamut metadata DIP (used
 * for HDR output metadata); never the case via LSPCON or on port A of
 * pre-ICL platforms.
 */
bool
intel_dp_has_gamut_metadata_dip(struct intel_encoder *encoder)
{
	struct intel_display *display = to_intel_display(encoder);
	enum port port = encoder->port;

	if (intel_bios_encoder_is_lspcon(encoder->devdata))
		return false;

	if (DISPLAY_VER(display) >= 11)
		return true;

	if (port == PORT_A)
		return false;

	if (display->platform.haswell || display->platform.broadwell ||
	    DISPLAY_VER(display) >= 9)
		return true;

	return false;
}

/*
 * Attach the DRM properties common to all DP connectors (subconnector,
 * audio, broadcast RGB, max bpc, colorspace, HDR metadata, VRR).
 */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(intel_dp);
	enum port port = dp_to_dig_port(intel_dp)->base.port;

	if (!intel_dp_is_edp(intel_dp))
		drm_connector_attach_dp_subconnector_property(&connector->base);

	if (!display->platform.g4x && port != PORT_A)
		intel_attach_force_audio_property(&connector->base);

	intel_attach_broadcast_rgb_property(&connector->base);
	if (HAS_GMCH(display))
		drm_connector_attach_max_bpc_property(&connector->base, 6, 10);
	else if (DISPLAY_VER(display) >= 5)
		drm_connector_attach_max_bpc_property(&connector->base, 6, 12);

	/* Register HDMI colorspace for case of lspcon */
	if (intel_bios_encoder_is_lspcon(dp_to_dig_port(intel_dp)->base.devdata)) {
		drm_connector_attach_content_type_property(&connector->base);
		intel_attach_hdmi_colorspace_property(&connector->base);
	} else {
		intel_attach_dp_colorspace_property(&connector->base);
	}

	if (intel_dp_has_gamut_metadata_dip(&dp_to_dig_port(intel_dp)->base))
		drm_connector_attach_hdr_output_metadata_property(&connector->base);

	if (HAS_VRR(display))
		drm_connector_attach_vrr_capable_property(&connector->base);
}

/*
 * Attach the eDP-only properties (scaling mode, panel orientation based
 * on the preferred fixed mode).
 */
static void
intel_edp_add_properties(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = intel_dp->attached_connector;
	const struct drm_display_mode *fixed_mode =
		intel_panel_preferred_fixed_mode(connector);

	intel_attach_scaling_mode_property(&connector->base);

	drm_connector_set_panel_orientation_with_quirk(&connector->base,
						       display->vbt.orientation,
						       fixed_mode->hdisplay,
						       fixed_mode->vdisplay);
}

/* Set up backlight control, resolving the initial pipe on VLV/CHV. */
static void intel_edp_backlight_setup(struct intel_dp *intel_dp,
				      struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(intel_dp);
	enum pipe pipe = INVALID_PIPE;

	if (display->platform.valleyview || display->platform.cherryview)
		pipe = vlv_pps_backlight_initial_pipe(intel_dp);

	intel_backlight_setup(connector, pipe);
}

/*
 * eDP-specific part of connector init: PPS setup, DPCD/EDID caching,
 * ghost-panel detection, fixed mode setup, backlight and properties.
 * Returns false (after turning VDD off) if the panel is unusable, in
 * which case the connector should not be registered. No-op (true) for
 * non-eDP.
 */
static bool intel_edp_init_connector(struct intel_dp *intel_dp,
				     struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_display_mode *fixed_mode;
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	bool has_dpcd;
	const struct drm_edid *drm_edid;

	if (!intel_dp_is_edp(intel_dp))
		return true;

	/*
	 * On IBX/CPT we may get here with LVDS already registered. Since the
	 * driver uses the only internal power sequencer available for both
	 * eDP and LVDS bail out early in this case to prevent interfering
	 * with an already powered-on LVDS power sequencer.
	 */
	if (intel_get_lvds_encoder(display)) {
		drm_WARN_ON(display->drm,
			    !(HAS_PCH_IBX(display) || HAS_PCH_CPT(display)));
		drm_info(display->drm,
			 "LVDS was detected, not registering eDP\n");

		return false;
	}

	intel_bios_init_panel_early(display, &connector->panel,
				    encoder->devdata);

	if (!intel_pps_init(intel_dp)) {
		drm_info(display->drm,
			 "[ENCODER:%d:%s] unusable PPS, disabling eDP\n",
			 encoder->base.base.id, encoder->base.name);
		/*
		 * The BIOS may have still enabled VDD on the PPS even
		 * though it's unusable. Make sure we turn it back off
		 * and to release the power domain references/etc.
		 */
		goto out_vdd_off;
	}

	/*
	 * Enable HPD sense for live status check.
	 * intel_hpd_irq_setup() will turn it off again
	 * if it's no longer needed later.
	 *
	 * The DPCD probe below will make sure VDD is on.
	 */
	intel_hpd_enable_detection(encoder);

	intel_alpm_init(intel_dp);

	/* Cache DPCD and EDID for edp. */
	has_dpcd = intel_edp_init_dpcd(intel_dp, connector);

	if (!has_dpcd) {
		/* if this fails, presume the device is a ghost */
		drm_info(display->drm,
			 "[ENCODER:%d:%s] failed to retrieve link info, disabling eDP\n",
			 encoder->base.base.id, encoder->base.name);
		goto out_vdd_off;
	}

	/*
	 * VBT and straps are liars. Also check HPD as that seems
	 * to be the most reliable piece of information available.
	 *
	 * ... expect on devices that forgot to hook HPD up for eDP
	 * (eg. Acer Chromebook C710), so we'll check it only if multiple
	 * ports are attempting to use the same AUX CH, according to VBT.
	 */
	if (intel_bios_dp_has_shared_aux_ch(encoder->devdata)) {
		/*
		 * If this fails, presume the DPCD answer came
		 * from some other port using the same AUX CH.
		 *
		 * FIXME maybe cleaner to check this before the
		 * DPCD read? Would need sort out the VDD handling...
		 */
		if (!intel_digital_port_connected(encoder)) {
			drm_info(display->drm,
				 "[ENCODER:%d:%s] HPD is down, disabling eDP\n",
				 encoder->base.base.id, encoder->base.name);
			goto out_vdd_off;
		}

		/*
		 * Unfortunately even the HPD based detection fails on
		 * eg. Asus B360M-A (CFL+CNP), so as a last resort fall
		 * back to checking for a VGA branch device. Only do this
		 * on known affected platforms to minimize false positives.
		 */
		if (DISPLAY_VER(display) == 9 && drm_dp_is_branch(intel_dp->dpcd) &&
		    (intel_dp->dpcd[DP_DOWNSTREAMPORT_PRESENT] & DP_DWN_STRM_PORT_TYPE_MASK) ==
		    DP_DWN_STRM_PORT_TYPE_ANALOG) {
			drm_info(display->drm,
				 "[ENCODER:%d:%s] VGA converter detected, disabling eDP\n",
				 encoder->base.base.id, encoder->base.name);
			goto out_vdd_off;
		}
	}

	mutex_lock(&display->drm->mode_config.mutex);
	drm_edid = drm_edid_read_ddc(&connector->base, connector->base.ddc);
	if (!drm_edid) {
		/* Fallback to EDID from ACPI OpRegion, if any */
		drm_edid = intel_opregion_get_edid(connector);
		if (drm_edid)
			drm_dbg_kms(display->drm,
				    "[CONNECTOR:%d:%s] Using OpRegion EDID\n",
				    connector->base.base.id, connector->base.name);
	}
	if (drm_edid) {
		if (drm_edid_connector_update(&connector->base, drm_edid) ||
		    !drm_edid_connector_add_modes(&connector->base)) {
			drm_edid_connector_update(&connector->base, NULL);
			drm_edid_free(drm_edid);
			drm_edid = ERR_PTR(-EINVAL);
		}
	} else {
		drm_edid = ERR_PTR(-ENOENT);
	}

	intel_bios_init_panel_late(display, &connector->panel, encoder->devdata,
				   IS_ERR(drm_edid) ? NULL : drm_edid);

	intel_panel_add_edid_fixed_modes(connector, true);

	/* MSO requires information from the EDID */
	intel_edp_mso_init(intel_dp);

	/* multiply the mode clock and horizontal timings for MSO */
	list_for_each_entry(fixed_mode, &connector->panel.fixed_modes, head)
		intel_edp_mso_mode_fixup(connector, fixed_mode);

	/* fallback to VBT if available for eDP */
	if (!intel_panel_preferred_fixed_mode(connector))
		intel_panel_add_vbt_lfp_fixed_mode(connector);

	mutex_unlock(&display->drm->mode_config.mutex);

	if (!intel_panel_preferred_fixed_mode(connector)) {
		drm_info(display->drm,
			 "[ENCODER:%d:%s] failed to find fixed mode for the panel, disabling eDP\n",
			 encoder->base.base.id, encoder->base.name);
		goto out_vdd_off;
	}

	intel_panel_init(connector, drm_edid);

	intel_edp_backlight_setup(intel_dp, connector);

	intel_edp_add_properties(intel_dp);

	intel_pps_init_late(intel_dp);

	return true;

out_vdd_off:
	intel_pps_vdd_off_sync(intel_dp);
	intel_bios_fini_panel(&connector->panel);

	return false;
}

/*
 * Create and initialize the DP/eDP connector for a digital port:
 * connector type selection, AUX init, rates, MST, HDCP, PSR and
 * properties. Returns false on failure with everything cleaned up.
 */
bool
intel_dp_init_connector(struct intel_digital_port *dig_port,
			struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_dp *intel_dp = &dig_port->dp;
	struct intel_encoder *encoder = &dig_port->base;
	struct drm_device *dev = encoder->base.dev;
	enum port port = encoder->port;
	int type;

	if (drm_WARN(dev, dig_port->max_lanes < 1,
		     "Not enough lanes (%d) for DP on [ENCODER:%d:%s]\n",
		     dig_port->max_lanes, encoder->base.base.id,
		     encoder->base.name))
		return false;

	intel_dp->reset_link_params = true;

	/* Preserve the current hw state. */
	intel_dp->DP = intel_de_read(display, intel_dp->output_reg);
	intel_dp->attached_connector = connector;

	if (_intel_dp_is_port_edp(display, encoder->devdata, port)) {
		/*
		 * Currently we don't support eDP on TypeC ports for DISPLAY_VER < 30,
		 * although in theory it could work on TypeC legacy ports.
		 */
		drm_WARN_ON(dev, intel_encoder_is_tc(encoder) &&
			    DISPLAY_VER(display) < 30);
		type = DRM_MODE_CONNECTOR_eDP;
		encoder->type = INTEL_OUTPUT_EDP;

		/* eDP only on port B and/or C on vlv/chv */
		if (drm_WARN_ON(dev, (display->platform.valleyview ||
				      display->platform.cherryview) &&
				port != PORT_B && port != PORT_C))
			return false;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
	}

	intel_dp_set_default_sink_rates(intel_dp);
	intel_dp_set_default_max_sink_lane_count(intel_dp);

	if (display->platform.valleyview || display->platform.cherryview)
		vlv_pps_pipe_init(intel_dp);

	intel_dp_aux_init(intel_dp);
	connector->dp.dsc_decompression_aux = &intel_dp->aux;

	drm_dbg_kms(display->drm,
		    "Adding %s connector on [ENCODER:%d:%s]\n",
		    type == DRM_MODE_CONNECTOR_eDP ? "eDP" : "DP",
		    encoder->base.base.id, encoder->base.name);

	drm_connector_init_with_ddc(dev, &connector->base, &intel_dp_connector_funcs,
				    type, &intel_dp->aux.ddc);
	drm_connector_helper_add(&connector->base, &intel_dp_connector_helper_funcs);

	if (!HAS_GMCH(display) && DISPLAY_VER(display) < 12)
		connector->base.interlace_allowed = true;

	if (type != DRM_MODE_CONNECTOR_eDP)
		connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->base.polled = connector->polled;

	intel_connector_attach_encoder(connector, encoder);

	if (HAS_DDI(display))
		connector->get_hw_state = intel_ddi_connector_get_hw_state;
	else
		connector->get_hw_state = intel_connector_get_hw_state;
	connector->sync_state = intel_dp_connector_sync_state;

	if (!intel_edp_init_connector(intel_dp, connector)) {
		intel_dp_aux_fini(intel_dp);
		goto fail;
	}

	intel_dp_set_source_rates(intel_dp);
	intel_dp_set_common_rates(intel_dp);
	intel_dp_reset_link_params(intel_dp);

	/* init MST on ports that can support it */
	intel_dp_mst_encoder_init(dig_port, connector->base.base.id);

	intel_dp_add_properties(intel_dp, &connector->base);

	if (is_hdcp_supported(display, port) && !intel_dp_is_edp(intel_dp)) {
		int ret = intel_dp_hdcp_init(dig_port, connector);
		if (ret)
			drm_dbg_kms(display->drm,
				    "HDCP init failed, skipping.\n");
	}

	intel_dp->frl.is_trained = false;
	intel_dp->frl.trained_rate_gbps = 0;

	intel_psr_init(intel_dp);

	return true;

fail:
	intel_display_power_flush_work(display);
	drm_connector_cleanup(&connector->base);

	return false;
}

/* Suspend the MST topology manager on every MST-capable DDI encoder. */
void intel_dp_mst_suspend(struct intel_display *display)
{
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(display))
		return;

	for_each_intel_encoder(display->drm, encoder) {
		struct intel_dp *intel_dp;

		if (encoder->type != INTEL_OUTPUT_DDI)
			continue;

		intel_dp = enc_to_intel_dp(encoder);

		if (!intel_dp_mst_source_support(intel_dp))
			continue;

		if (intel_dp->is_mst)
			drm_dp_mst_topology_mgr_suspend(&intel_dp->mst.mgr);
	}
}

/*
 * Resume the MST topology managers; fall back to SST if a topology
 * can't be resumed (e.g. the sink was replaced while suspended).
 */
void intel_dp_mst_resume(struct intel_display *display)
{
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(display))
		return;

	for_each_intel_encoder(display->drm, encoder) {
		struct intel_dp *intel_dp;
		int ret;

		if (encoder->type != INTEL_OUTPUT_DDI)
			continue;

		intel_dp = enc_to_intel_dp(encoder);

		if (!intel_dp_mst_source_support(intel_dp))
			continue;

		ret = drm_dp_mst_topology_mgr_resume(&intel_dp->mst.mgr, true);
		if (ret) {
			intel_dp->is_mst = false;
			drm_dp_mst_topology_mgr_set_mst(&intel_dp->mst.mgr, false);
		}
	}
}

/*
 * Reject the state if the vblank guardband is too short to transmit all
 * the SDPs that may be enabled.
 */
static
int intel_dp_sdp_compute_config_late(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int guardband = intel_crtc_vblank_length(crtc_state);
	int min_sdp_guardband = intel_dp_sdp_min_guardband(crtc_state, false);

	if (guardband < min_sdp_guardband) {
		drm_dbg_kms(display->drm, "guardband %d < min sdp guardband %d\n",
			    guardband, min_sdp_guardband);
		return -EINVAL;
	}

	return 0;
}

/* Late compute_config step: PSR config plus the SDP guardband check. */
int intel_dp_compute_config_late(struct intel_encoder *encoder,
				 struct intel_crtc_state *crtc_state,
				 struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	int ret;

	intel_psr_compute_config_late(intel_dp, crtc_state);

	ret = intel_dp_sdp_compute_config_late(crtc_state);
	if (ret)
		return ret;

	return 0;
}

/*
 * Number of scanlines needed before the active period to transmit the
 * given SDP type; 0 for types with no such requirement.
 */
static
int intel_dp_get_lines_for_sdp(const struct intel_crtc_state *crtc_state, u32 type)
{
	switch (type) {
	case DP_SDP_VSC_EXT_VESA:
	case DP_SDP_VSC_EXT_CEA:
		return 10;
	case HDMI_PACKET_TYPE_GAMUT_METADATA:
		return 8;
	case DP_SDP_PPS:
		return 7;
	case DP_SDP_ADAPTIVE_SYNC:
		return crtc_state->vrr.vsync_start + 1;
	default:
		break;
	}

	return 0;
}

/*
 * Minimum vblank guardband (in scanlines) needed for the SDPs that are
 * (or, with @assume_all_enabled, could be) enabled in @crtc_state.
 */
int intel_dp_sdp_min_guardband(const struct intel_crtc_state *crtc_state,
			       bool assume_all_enabled)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int sdp_guardband = 0;

	if (assume_all_enabled ||
	    crtc_state->infoframes.enable &
	    intel_hdmi_infoframe_enable(HDMI_PACKET_TYPE_GAMUT_METADATA))
		sdp_guardband = max(sdp_guardband,
				    intel_dp_get_lines_for_sdp(crtc_state,
							       HDMI_PACKET_TYPE_GAMUT_METADATA));

	if (assume_all_enabled ||
	    crtc_state->dsc.compression_enable)
		sdp_guardband = max(sdp_guardband,
				    intel_dp_get_lines_for_sdp(crtc_state, DP_SDP_PPS));

	if ((assume_all_enabled && HAS_AS_SDP(display)) ||
	    crtc_state->infoframes.enable & intel_hdmi_infoframe_enable(DP_SDP_ADAPTIVE_SYNC))
		sdp_guardband = max(sdp_guardband,
				    intel_dp_get_lines_for_sdp(crtc_state, DP_SDP_ADAPTIVE_SYNC));

	return sdp_guardband;
}