1 /* 2 * Copyright © 2008 Intel Corporation 3 * 2014 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice (including the next 13 * paragraph) shall be included in all copies or substantial portions of the 14 * Software. 15 * 16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 22 * IN THE SOFTWARE. 23 * 24 */ 25 26 #include <drm/drm_atomic.h> 27 #include <drm/drm_atomic_helper.h> 28 #include <drm/drm_edid.h> 29 #include <drm/drm_fixed.h> 30 #include <drm/drm_probe_helper.h> 31 32 #include "i915_drv.h" 33 #include "i915_reg.h" 34 #include "intel_atomic.h" 35 #include "intel_audio.h" 36 #include "intel_connector.h" 37 #include "intel_crtc.h" 38 #include "intel_ddi.h" 39 #include "intel_de.h" 40 #include "intel_display_driver.h" 41 #include "intel_display_types.h" 42 #include "intel_dp.h" 43 #include "intel_dp_hdcp.h" 44 #include "intel_dp_link_training.h" 45 #include "intel_dp_mst.h" 46 #include "intel_dp_test.h" 47 #include "intel_dp_tunnel.h" 48 #include "intel_dpio_phy.h" 49 #include "intel_hdcp.h" 50 #include "intel_hotplug.h" 51 #include "intel_link_bw.h" 52 #include "intel_psr.h" 53 #include "intel_vdsc.h" 54 #include "skl_scaler.h" 55 56 /* 57 * DP MST (DisplayPort Multi-Stream Transport) 58 * 59 * MST support on the source depends on the platform and port. DP initialization 60 * sets up MST for each MST capable encoder. This will become the primary 61 * encoder for the port. 62 * 63 * MST initialization of each primary encoder creates MST stream encoders, one 64 * per pipe, and initializes the MST topology manager. The MST stream encoders 65 * are sometimes called "fake encoders", because they're virtual, not 66 * physical. Thus there are (number of MST capable ports) x (number of pipes) 67 * MST stream encoders in total. 68 * 69 * Decision to use MST for a sink happens at detect on the connector attached to 70 * the primary encoder, and this will not change while the sink is connected. We 71 * always use MST when possible, including for SST sinks with sideband messaging 72 * support. 73 * 74 * The connectors for the MST streams are added and removed dynamically by the 75 * topology manager. Their connection status is also determined by the topology 76 * manager. 77 * 78 * On hardware, each transcoder may be associated with a single DDI 79 * port. Multiple transcoders may be associated with the same DDI port only if 80 * the port is in MST mode. 81 * 82 * On TGL+, all the transcoders streaming on the same DDI port will indicate a 83 * primary transcoder; the TGL_DP_TP_CTL and TGL_DP_TP_STATUS registers are 84 * relevant only on the primary transcoder. Prior to that, they are port 85 * registers. 86 */ 87 88 /* From fake MST stream encoder to primary encoder */ 89 static struct intel_encoder *to_primary_encoder(struct intel_encoder *encoder) 90 { 91 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder); 92 struct intel_digital_port *dig_port = intel_mst->primary; 93 94 return &dig_port->base; 95 } 96 97 /* From fake MST stream encoder to primary DP */ 98 static struct intel_dp *to_primary_dp(struct intel_encoder *encoder) 99 { 100 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder); 101 struct intel_digital_port *dig_port = intel_mst->primary; 102 103 return &dig_port->dp; 104 } 105 106 static int intel_dp_mst_max_dpt_bpp(const struct intel_crtc_state *crtc_state, 107 bool dsc) 108 { 109 struct intel_display *display = to_intel_display(crtc_state); 110 const struct drm_display_mode *adjusted_mode = 111 &crtc_state->hw.adjusted_mode; 112 113 if (!intel_dp_is_uhbr(crtc_state) || DISPLAY_VER(display) >= 20 || !dsc) 114 return INT_MAX; 115 116 /* 117 * DSC->DPT interface width: 118 * ICL-MTL: 72 bits (each branch has 72 bits, only left branch is used) 119 * LNL+: 144 bits (not a bottleneck in any config) 120 * 121 * Bspec/49259 suggests that the FEC overhead needs to be 122 * applied here, though HW people claim that neither this FEC 123 * or any other overhead is applicable here (that is the actual 124 * available_bw is just symbol_clock * 72). However based on 125 * testing on MTL-P the 126 * - DELL U3224KBA display 127 * - Unigraf UCD-500 CTS test sink 128 * devices the 129 * - 5120x2880/995.59Mhz 130 * - 6016x3384/1357.23Mhz 131 * - 6144x3456/1413.39Mhz 132 * modes (all the ones having a DPT limit on the above devices), 133 * both the channel coding efficiency and an additional 3% 134 * overhead needs to be accounted for. 135 */ 136 return div64_u64(mul_u32_u32(intel_dp_link_symbol_clock(crtc_state->port_clock) * 72, 137 drm_dp_bw_channel_coding_efficiency(true)), 138 mul_u32_u32(adjusted_mode->crtc_clock, 1030000)); 139 } 140 141 static int intel_dp_mst_bw_overhead(const struct intel_crtc_state *crtc_state, 142 bool ssc, int dsc_slice_count, int bpp_x16) 143 { 144 const struct drm_display_mode *adjusted_mode = 145 &crtc_state->hw.adjusted_mode; 146 unsigned long flags = DRM_DP_BW_OVERHEAD_MST; 147 int overhead; 148 149 flags |= intel_dp_is_uhbr(crtc_state) ? DRM_DP_BW_OVERHEAD_UHBR : 0; 150 flags |= ssc ? DRM_DP_BW_OVERHEAD_SSC_REF_CLK : 0; 151 flags |= crtc_state->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0; 152 153 if (dsc_slice_count) 154 flags |= DRM_DP_BW_OVERHEAD_DSC; 155 156 overhead = drm_dp_bw_overhead(crtc_state->lane_count, 157 adjusted_mode->hdisplay, 158 dsc_slice_count, 159 bpp_x16, 160 flags); 161 162 /* 163 * TODO: clarify whether a minimum required by the fixed FEC overhead 164 * in the bspec audio programming sequence is required here. 165 */ 166 return max(overhead, intel_dp_bw_fec_overhead(crtc_state->fec_enable)); 167 } 168 169 static void intel_dp_mst_compute_m_n(const struct intel_crtc_state *crtc_state, 170 int overhead, 171 int bpp_x16, 172 struct intel_link_m_n *m_n) 173 { 174 const struct drm_display_mode *adjusted_mode = 175 &crtc_state->hw.adjusted_mode; 176 177 /* TODO: Check WA 14013163432 to set data M/N for full BW utilization. */ 178 intel_link_compute_m_n(bpp_x16, crtc_state->lane_count, 179 adjusted_mode->crtc_clock, 180 crtc_state->port_clock, 181 overhead, 182 m_n); 183 184 m_n->tu = DIV_ROUND_UP_ULL(mul_u32_u32(m_n->data_m, 64), m_n->data_n); 185 } 186 187 static int intel_dp_mst_calc_pbn(int pixel_clock, int bpp_x16, int bw_overhead) 188 { 189 int effective_data_rate = 190 intel_dp_effective_data_rate(pixel_clock, bpp_x16, bw_overhead); 191 192 /* 193 * TODO: Use drm_dp_calc_pbn_mode() instead, once it's converted 194 * to calculate PBN with the BW overhead passed to it. 195 */ 196 return DIV_ROUND_UP(effective_data_rate * 64, 54 * 1000); 197 } 198 199 static int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector, 200 const struct intel_crtc_state *crtc_state) 201 { 202 const struct drm_display_mode *adjusted_mode = 203 &crtc_state->hw.adjusted_mode; 204 int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state); 205 206 return intel_dp_dsc_get_slice_count(connector, 207 adjusted_mode->clock, 208 adjusted_mode->hdisplay, 209 num_joined_pipes); 210 } 211 212 int intel_dp_mtp_tu_compute_config(struct intel_dp *intel_dp, 213 struct intel_crtc_state *crtc_state, 214 int max_bpp, int min_bpp, 215 struct drm_connector_state *conn_state, 216 int step, bool dsc) 217 { 218 struct intel_display *display = to_intel_display(intel_dp); 219 struct drm_atomic_state *state = crtc_state->uapi.state; 220 struct intel_connector *connector = 221 to_intel_connector(conn_state->connector); 222 const struct drm_display_mode *adjusted_mode = 223 &crtc_state->hw.adjusted_mode; 224 fixed20_12 pbn_div; 225 int bpp, slots = -EINVAL; 226 int dsc_slice_count = 0; 227 int max_dpt_bpp; 228 229 if (dsc) { 230 if (!intel_dp_supports_fec(intel_dp, connector, crtc_state)) 231 return -EINVAL; 232 233 crtc_state->fec_enable = !intel_dp_is_uhbr(crtc_state); 234 } 235 236 pbn_div = drm_dp_get_vc_payload_bw(crtc_state->port_clock, 237 crtc_state->lane_count); 238 239 max_dpt_bpp = intel_dp_mst_max_dpt_bpp(crtc_state, dsc); 240 if (max_bpp > max_dpt_bpp) { 241 drm_dbg_kms(display->drm, "Limiting bpp to max DPT bpp (%d -> %d)\n", 242 max_bpp, max_dpt_bpp); 243 max_bpp = max_dpt_bpp; 244 } 245 246 drm_dbg_kms(display->drm, "Looking for slots in range min bpp %d max bpp %d\n", 247 min_bpp, max_bpp); 248 249 if (dsc) { 250 dsc_slice_count = intel_dp_mst_dsc_get_slice_count(connector, crtc_state); 251 if (!dsc_slice_count) { 252 drm_dbg_kms(display->drm, "Can't get valid DSC slice count\n"); 253 254 return -ENOSPC; 255 } 256 } 257 258 for (bpp = max_bpp; bpp >= min_bpp; bpp -= step) { 259 int local_bw_overhead; 260 int link_bpp_x16; 261 262 drm_dbg_kms(display->drm, "Trying bpp %d\n", bpp); 263 264 link_bpp_x16 = fxp_q4_from_int(dsc ? bpp : 265 intel_dp_output_bpp(crtc_state->output_format, bpp)); 266 267 local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state, 268 false, dsc_slice_count, link_bpp_x16); 269 intel_dp_mst_compute_m_n(crtc_state, 270 local_bw_overhead, 271 link_bpp_x16, 272 &crtc_state->dp_m_n); 273 274 if (intel_dp->is_mst) { 275 int remote_bw_overhead; 276 int remote_tu; 277 fixed20_12 pbn; 278 279 remote_bw_overhead = intel_dp_mst_bw_overhead(crtc_state, 280 true, dsc_slice_count, link_bpp_x16); 281 282 /* 283 * The TU size programmed to the HW determines which slots in 284 * an MTP frame are used for this stream, which needs to match 285 * the payload size programmed to the first downstream branch 286 * device's payload table. 287 * 288 * Note that atm the payload's PBN value DRM core sends via 289 * the ALLOCATE_PAYLOAD side-band message matches the payload 290 * size (which it calculates from the PBN value) it programs 291 * to the first branch device's payload table. The allocation 292 * in the payload table could be reduced though (to 293 * crtc_state->dp_m_n.tu), provided that the driver doesn't 294 * enable SSC on the corresponding link. 295 */ 296 pbn.full = dfixed_const(intel_dp_mst_calc_pbn(adjusted_mode->crtc_clock, 297 link_bpp_x16, 298 remote_bw_overhead)); 299 remote_tu = DIV_ROUND_UP(pbn.full, pbn_div.full); 300 301 /* 302 * Aligning the TUs ensures that symbols consisting of multiple 303 * (4) symbol cycles don't get split between two consecutive 304 * MTPs, as required by Bspec. 305 * TODO: remove the alignment restriction for 128b/132b links 306 * on some platforms, where Bspec allows this. 307 */ 308 remote_tu = ALIGN(remote_tu, 4 / crtc_state->lane_count); 309 310 /* 311 * Also align PBNs accordingly, since MST core will derive its 312 * own copy of TU from the PBN in drm_dp_atomic_find_time_slots(). 313 * The above comment about the difference between the PBN 314 * allocated for the whole path and the TUs allocated for the 315 * first branch device's link also applies here. 316 */ 317 pbn.full = remote_tu * pbn_div.full; 318 319 drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu); 320 crtc_state->dp_m_n.tu = remote_tu; 321 322 slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst_mgr, 323 connector->port, 324 dfixed_trunc(pbn)); 325 } else { 326 /* Same as above for remote_tu */ 327 crtc_state->dp_m_n.tu = ALIGN(crtc_state->dp_m_n.tu, 328 4 / crtc_state->lane_count); 329 330 if (crtc_state->dp_m_n.tu <= 64) 331 slots = crtc_state->dp_m_n.tu; 332 else 333 slots = -EINVAL; 334 } 335 336 if (slots == -EDEADLK) 337 return slots; 338 339 if (slots >= 0) { 340 drm_WARN_ON(display->drm, slots != crtc_state->dp_m_n.tu); 341 342 break; 343 } 344 345 /* Allow using zero step to indicate one try */ 346 if (!step) 347 break; 348 } 349 350 if (slots < 0) { 351 drm_dbg_kms(display->drm, "failed finding vcpi slots:%d\n", 352 slots); 353 return slots; 354 } 355 356 if (!dsc) 357 crtc_state->pipe_bpp = bpp; 358 else 359 crtc_state->dsc.compressed_bpp_x16 = fxp_q4_from_int(bpp); 360 361 drm_dbg_kms(display->drm, "Got %d slots for pipe bpp %d dsc %d\n", 362 slots, bpp, dsc); 363 364 return 0; 365 } 366 367 static int mst_stream_find_vcpi_slots_for_bpp(struct intel_dp *intel_dp, 368 struct intel_crtc_state *crtc_state, 369 int max_bpp, int min_bpp, 370 struct link_config_limits *limits, 371 struct drm_connector_state *conn_state, 372 int step, bool dsc) 373 { 374 struct drm_atomic_state *state = crtc_state->uapi.state; 375 struct drm_dp_mst_topology_state *mst_state; 376 377 mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst_mgr); 378 if (IS_ERR(mst_state)) 379 return PTR_ERR(mst_state); 380 381 crtc_state->lane_count = limits->max_lane_count; 382 crtc_state->port_clock = limits->max_rate; 383 384 mst_state->pbn_div = drm_dp_get_vc_payload_bw(crtc_state->port_clock, 385 crtc_state->lane_count); 386 387 return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, 388 max_bpp, min_bpp, 389 conn_state, step, dsc); 390 } 391 392 static int mst_stream_compute_link_config(struct intel_dp *intel_dp, 393 struct intel_crtc_state *crtc_state, 394 struct drm_connector_state *conn_state, 395 struct link_config_limits *limits) 396 { 397 /* 398 * FIXME: allocate the BW according to link_bpp, which in the case of 399 * YUV420 is only half of the pipe bpp value. 400 */ 401 return mst_stream_find_vcpi_slots_for_bpp(intel_dp, crtc_state, 402 fxp_q4_to_int(limits->link.max_bpp_x16), 403 fxp_q4_to_int(limits->link.min_bpp_x16), 404 limits, 405 conn_state, 2 * 3, false); 406 } 407 408 static int mst_stream_dsc_compute_link_config(struct intel_dp *intel_dp, 409 struct intel_crtc_state *crtc_state, 410 struct drm_connector_state *conn_state, 411 struct link_config_limits *limits) 412 { 413 struct intel_display *display = to_intel_display(intel_dp); 414 struct intel_connector *connector = to_intel_connector(conn_state->connector); 415 int i, num_bpc; 416 u8 dsc_bpc[3] = {}; 417 int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp; 418 int min_compressed_bpp, max_compressed_bpp; 419 420 max_bpp = limits->pipe.max_bpp; 421 min_bpp = limits->pipe.min_bpp; 422 423 num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd, 424 dsc_bpc); 425 426 drm_dbg_kms(display->drm, "DSC Source supported min bpp %d max bpp %d\n", 427 min_bpp, max_bpp); 428 429 sink_max_bpp = dsc_bpc[0] * 3; 430 sink_min_bpp = sink_max_bpp; 431 432 for (i = 1; i < num_bpc; i++) { 433 if (sink_min_bpp > dsc_bpc[i] * 3) 434 sink_min_bpp = dsc_bpc[i] * 3; 435 if (sink_max_bpp < dsc_bpc[i] * 3) 436 sink_max_bpp = dsc_bpc[i] * 3; 437 } 438 439 drm_dbg_kms(display->drm, "DSC Sink supported min bpp %d max bpp %d\n", 440 sink_min_bpp, sink_max_bpp); 441 442 if (min_bpp < sink_min_bpp) 443 min_bpp = sink_min_bpp; 444 445 if (max_bpp > sink_max_bpp) 446 max_bpp = sink_max_bpp; 447 448 crtc_state->pipe_bpp = max_bpp; 449 450 max_compressed_bpp = fxp_q4_to_int(limits->link.max_bpp_x16); 451 min_compressed_bpp = fxp_q4_to_int_roundup(limits->link.min_bpp_x16); 452 453 drm_dbg_kms(display->drm, "DSC Sink supported compressed min bpp %d compressed max bpp %d\n", 454 min_compressed_bpp, max_compressed_bpp); 455 456 /* Align compressed bpps according to our own constraints */ 457 max_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(display, max_compressed_bpp, 458 crtc_state->pipe_bpp); 459 min_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(display, min_compressed_bpp, 460 crtc_state->pipe_bpp); 461 462 return mst_stream_find_vcpi_slots_for_bpp(intel_dp, crtc_state, max_compressed_bpp, 463 min_compressed_bpp, limits, 464 conn_state, 1, true); 465 } 466 467 static int mst_stream_update_slots(struct intel_dp *intel_dp, 468 struct intel_crtc_state *crtc_state, 469 struct drm_connector_state *conn_state) 470 { 471 struct intel_display *display = to_intel_display(intel_dp); 472 struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr; 473 struct drm_dp_mst_topology_state *topology_state; 474 u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ? 475 DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B; 476 477 topology_state = drm_atomic_get_mst_topology_state(conn_state->state, mgr); 478 if (IS_ERR(topology_state)) { 479 drm_dbg_kms(display->drm, "slot update failed\n"); 480 return PTR_ERR(topology_state); 481 } 482 483 drm_dp_mst_update_slots(topology_state, link_coding_cap); 484 485 return 0; 486 } 487 488 static int mode_hblank_period_ns(const struct drm_display_mode *mode) 489 { 490 return DIV_ROUND_CLOSEST_ULL(mul_u32_u32(mode->htotal - mode->hdisplay, 491 NSEC_PER_SEC / 1000), 492 mode->crtc_clock); 493 } 494 495 static bool 496 hblank_expansion_quirk_needs_dsc(const struct intel_connector *connector, 497 const struct intel_crtc_state *crtc_state, 498 const struct link_config_limits *limits) 499 { 500 const struct drm_display_mode *adjusted_mode = 501 &crtc_state->hw.adjusted_mode; 502 bool is_uhbr_sink = connector->mst_port && 503 drm_dp_128b132b_supported(connector->mst_port->dpcd); 504 int hblank_limit = is_uhbr_sink ? 500 : 300; 505 506 if (!connector->dp.dsc_hblank_expansion_quirk) 507 return false; 508 509 if (is_uhbr_sink && !drm_dp_is_uhbr_rate(limits->max_rate)) 510 return false; 511 512 if (mode_hblank_period_ns(adjusted_mode) > hblank_limit) 513 return false; 514 515 if (!intel_dp_mst_dsc_get_slice_count(connector, crtc_state)) 516 return false; 517 518 return true; 519 } 520 521 static bool 522 adjust_limits_for_dsc_hblank_expansion_quirk(struct intel_dp *intel_dp, 523 const struct intel_connector *connector, 524 const struct intel_crtc_state *crtc_state, 525 struct link_config_limits *limits, 526 bool dsc) 527 { 528 struct intel_display *display = to_intel_display(connector); 529 const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc); 530 int min_bpp_x16 = limits->link.min_bpp_x16; 531 532 if (!hblank_expansion_quirk_needs_dsc(connector, crtc_state, limits)) 533 return true; 534 535 if (!dsc) { 536 if (intel_dp_supports_dsc(intel_dp, connector, crtc_state)) { 537 drm_dbg_kms(display->drm, 538 "[CRTC:%d:%s][CONNECTOR:%d:%s] DSC needed by hblank expansion quirk\n", 539 crtc->base.base.id, crtc->base.name, 540 connector->base.base.id, connector->base.name); 541 return false; 542 } 543 544 drm_dbg_kms(display->drm, 545 "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to 24 due to hblank expansion quirk\n", 546 crtc->base.base.id, crtc->base.name, 547 connector->base.base.id, connector->base.name); 548 549 if (limits->link.max_bpp_x16 < fxp_q4_from_int(24)) 550 return false; 551 552 limits->link.min_bpp_x16 = fxp_q4_from_int(24); 553 554 return true; 555 } 556 557 drm_WARN_ON(display->drm, limits->min_rate != limits->max_rate); 558 559 if (limits->max_rate < 540000) 560 min_bpp_x16 = fxp_q4_from_int(13); 561 else if (limits->max_rate < 810000) 562 min_bpp_x16 = fxp_q4_from_int(10); 563 564 if (limits->link.min_bpp_x16 >= min_bpp_x16) 565 return true; 566 567 drm_dbg_kms(display->drm, 568 "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to " FXP_Q4_FMT " in DSC mode due to hblank expansion quirk\n", 569 crtc->base.base.id, crtc->base.name, 570 connector->base.base.id, connector->base.name, 571 FXP_Q4_ARGS(min_bpp_x16)); 572 573 if (limits->link.max_bpp_x16 < min_bpp_x16) 574 return false; 575 576 limits->link.min_bpp_x16 = min_bpp_x16; 577 578 return true; 579 } 580 581 static bool 582 mst_stream_compute_config_limits(struct intel_dp *intel_dp, 583 const struct intel_connector *connector, 584 struct intel_crtc_state *crtc_state, 585 bool dsc, 586 struct link_config_limits *limits) 587 { 588 if (!intel_dp_compute_config_limits(intel_dp, crtc_state, false, dsc, 589 limits)) 590 return false; 591 592 return adjust_limits_for_dsc_hblank_expansion_quirk(intel_dp, 593 connector, 594 crtc_state, 595 limits, 596 dsc); 597 } 598 599 static int mst_stream_compute_config(struct intel_encoder *encoder, 600 struct intel_crtc_state *pipe_config, 601 struct drm_connector_state *conn_state) 602 { 603 struct intel_display *display = to_intel_display(encoder); 604 struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state); 605 struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc); 606 struct intel_dp *intel_dp = to_primary_dp(encoder); 607 struct intel_connector *connector = 608 to_intel_connector(conn_state->connector); 609 const struct drm_display_mode *adjusted_mode = 610 &pipe_config->hw.adjusted_mode; 611 struct link_config_limits limits; 612 bool dsc_needed, joiner_needs_dsc; 613 int num_joined_pipes; 614 int ret = 0; 615 616 if (pipe_config->fec_enable && 617 !intel_dp_supports_fec(intel_dp, connector, pipe_config)) 618 return -EINVAL; 619 620 if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN) 621 return -EINVAL; 622 623 num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector, 624 adjusted_mode->crtc_hdisplay, 625 adjusted_mode->crtc_clock); 626 if (num_joined_pipes > 1) 627 pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe); 628 629 pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB; 630 pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB; 631 pipe_config->has_pch_encoder = false; 632 633 joiner_needs_dsc = intel_dp_joiner_needs_dsc(display, num_joined_pipes); 634 635 dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en || 636 !mst_stream_compute_config_limits(intel_dp, connector, 637 pipe_config, false, &limits); 638 639 if (!dsc_needed) { 640 ret = mst_stream_compute_link_config(intel_dp, pipe_config, 641 conn_state, &limits); 642 643 if (ret == -EDEADLK) 644 return ret; 645 646 if (ret) 647 dsc_needed = true; 648 } 649 650 if (dsc_needed && !intel_dp_supports_dsc(intel_dp, connector, pipe_config)) { 651 drm_dbg_kms(display->drm, "DSC required but not available\n"); 652 return -EINVAL; 653 } 654 655 /* enable compression if the mode doesn't fit available BW */ 656 if (dsc_needed) { 657 drm_dbg_kms(display->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n", 658 str_yes_no(ret), str_yes_no(joiner_needs_dsc), 659 str_yes_no(intel_dp->force_dsc_en)); 660 661 662 if (!mst_stream_compute_config_limits(intel_dp, connector, 663 pipe_config, true, 664 &limits)) 665 return -EINVAL; 666 667 /* 668 * FIXME: As bpc is hardcoded to 8, as mentioned above, 669 * WARN and ignore the debug flag force_dsc_bpc for now. 670 */ 671 drm_WARN(display->drm, intel_dp->force_dsc_bpc, 672 "Cannot Force BPC for MST\n"); 673 /* 674 * Try to get at least some timeslots and then see, if 675 * we can fit there with DSC. 676 */ 677 drm_dbg_kms(display->drm, "Trying to find VCPI slots in DSC mode\n"); 678 679 ret = mst_stream_dsc_compute_link_config(intel_dp, pipe_config, 680 conn_state, &limits); 681 if (ret < 0) 682 return ret; 683 684 ret = intel_dp_dsc_compute_config(intel_dp, pipe_config, 685 conn_state, &limits, 686 pipe_config->dp_m_n.tu, false); 687 } 688 689 if (ret) 690 return ret; 691 692 ret = mst_stream_update_slots(intel_dp, pipe_config, conn_state); 693 if (ret) 694 return ret; 695 696 pipe_config->limited_color_range = 697 intel_dp_limited_color_range(pipe_config, conn_state); 698 699 if (display->platform.geminilake || display->platform.broxton) 700 pipe_config->lane_lat_optim_mask = 701 bxt_dpio_phy_calc_lane_lat_optim_mask(pipe_config->lane_count); 702 703 intel_dp_audio_compute_config(encoder, pipe_config, conn_state); 704 705 intel_ddi_compute_min_voltage_level(pipe_config); 706 707 intel_psr_compute_config(intel_dp, pipe_config, conn_state); 708 709 return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector, 710 pipe_config); 711 } 712 713 /* 714 * Iterate over all connectors and return a mask of 715 * all CPU transcoders streaming over the same DP link. 716 */ 717 static unsigned int 718 intel_dp_mst_transcoder_mask(struct intel_atomic_state *state, 719 struct intel_dp *mst_port) 720 { 721 struct intel_display *display = to_intel_display(state); 722 const struct intel_digital_connector_state *conn_state; 723 struct intel_connector *connector; 724 u8 transcoders = 0; 725 int i; 726 727 if (DISPLAY_VER(display) < 12) 728 return 0; 729 730 for_each_new_intel_connector_in_state(state, connector, conn_state, i) { 731 const struct intel_crtc_state *crtc_state; 732 struct intel_crtc *crtc; 733 734 if (connector->mst_port != mst_port || !conn_state->base.crtc) 735 continue; 736 737 crtc = to_intel_crtc(conn_state->base.crtc); 738 crtc_state = intel_atomic_get_new_crtc_state(state, crtc); 739 740 if (!crtc_state->hw.active) 741 continue; 742 743 transcoders |= BIT(crtc_state->cpu_transcoder); 744 } 745 746 return transcoders; 747 } 748 749 static u8 get_pipes_downstream_of_mst_port(struct intel_atomic_state *state, 750 struct drm_dp_mst_topology_mgr *mst_mgr, 751 struct drm_dp_mst_port *parent_port) 752 { 753 const struct intel_digital_connector_state *conn_state; 754 struct intel_connector *connector; 755 u8 mask = 0; 756 int i; 757 758 for_each_new_intel_connector_in_state(state, connector, conn_state, i) { 759 if (!conn_state->base.crtc) 760 continue; 761 762 if (&connector->mst_port->mst_mgr != mst_mgr) 763 continue; 764 765 if (connector->port != parent_port && 766 !drm_dp_mst_port_downstream_of_parent(mst_mgr, 767 connector->port, 768 parent_port)) 769 continue; 770 771 mask |= BIT(to_intel_crtc(conn_state->base.crtc)->pipe); 772 } 773 774 return mask; 775 } 776 777 static int intel_dp_mst_check_fec_change(struct intel_atomic_state *state, 778 struct drm_dp_mst_topology_mgr *mst_mgr, 779 struct intel_link_bw_limits *limits) 780 { 781 struct intel_display *display = to_intel_display(state); 782 struct intel_crtc *crtc; 783 u8 mst_pipe_mask; 784 u8 fec_pipe_mask = 0; 785 int ret; 786 787 mst_pipe_mask = get_pipes_downstream_of_mst_port(state, mst_mgr, NULL); 788 789 for_each_intel_crtc_in_pipe_mask(display->drm, crtc, mst_pipe_mask) { 790 struct intel_crtc_state *crtc_state = 791 intel_atomic_get_new_crtc_state(state, crtc); 792 793 /* Atomic connector check should've added all the MST CRTCs. */ 794 if (drm_WARN_ON(display->drm, !crtc_state)) 795 return -EINVAL; 796 797 if (crtc_state->fec_enable) 798 fec_pipe_mask |= BIT(crtc->pipe); 799 } 800 801 if (!fec_pipe_mask || mst_pipe_mask == fec_pipe_mask) 802 return 0; 803 804 limits->force_fec_pipes |= mst_pipe_mask; 805 806 ret = intel_modeset_pipes_in_mask_early(state, "MST FEC", 807 mst_pipe_mask); 808 809 return ret ? : -EAGAIN; 810 } 811 812 static int intel_dp_mst_check_bw(struct intel_atomic_state *state, 813 struct drm_dp_mst_topology_mgr *mst_mgr, 814 struct drm_dp_mst_topology_state *mst_state, 815 struct intel_link_bw_limits *limits) 816 { 817 struct drm_dp_mst_port *mst_port; 818 u8 mst_port_pipes; 819 int ret; 820 821 ret = drm_dp_mst_atomic_check_mgr(&state->base, mst_mgr, mst_state, &mst_port); 822 if (ret != -ENOSPC) 823 return ret; 824 825 mst_port_pipes = get_pipes_downstream_of_mst_port(state, mst_mgr, mst_port); 826 827 ret = intel_link_bw_reduce_bpp(state, limits, 828 mst_port_pipes, "MST link BW"); 829 830 return ret ? : -EAGAIN; 831 } 832 833 /** 834 * intel_dp_mst_atomic_check_link - check all modeset MST link configuration 835 * @state: intel atomic state 836 * @limits: link BW limits 837 * 838 * Check the link configuration for all modeset MST outputs. If the 839 * configuration is invalid @limits will be updated if possible to 840 * reduce the total BW, after which the configuration for all CRTCs in 841 * @state must be recomputed with the updated @limits. 842 * 843 * Returns: 844 * - 0 if the confugration is valid 845 * - %-EAGAIN, if the configuration is invalid and @limits got updated 846 * with fallback values with which the configuration of all CRTCs in 847 * @state must be recomputed 848 * - Other negative error, if the configuration is invalid without a 849 * fallback possibility, or the check failed for another reason 850 */ 851 int intel_dp_mst_atomic_check_link(struct intel_atomic_state *state, 852 struct intel_link_bw_limits *limits) 853 { 854 struct drm_dp_mst_topology_mgr *mgr; 855 struct drm_dp_mst_topology_state *mst_state; 856 int ret; 857 int i; 858 859 for_each_new_mst_mgr_in_state(&state->base, mgr, mst_state, i) { 860 ret = intel_dp_mst_check_fec_change(state, mgr, limits); 861 if (ret) 862 return ret; 863 864 ret = intel_dp_mst_check_bw(state, mgr, mst_state, 865 limits); 866 if (ret) 867 return ret; 868 } 869 870 return 0; 871 } 872 873 static int mst_stream_compute_config_late(struct intel_encoder *encoder, 874 struct intel_crtc_state *crtc_state, 875 struct drm_connector_state *conn_state) 876 { 877 struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state); 878 struct intel_dp *intel_dp = to_primary_dp(encoder); 879 880 /* lowest numbered transcoder will be designated master */ 881 crtc_state->mst_master_transcoder = 882 ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1; 883 884 return 0; 885 } 886 887 /* 888 * If one of the connectors in a MST stream needs a modeset, mark all CRTCs 889 * that shares the same MST stream as mode changed, 890 * intel_modeset_pipe_config()+intel_crtc_check_fastset() will take care to do 891 * a fastset when possible. 892 * 893 * On TGL+ this is required since each stream go through a master transcoder, 894 * so if the master transcoder needs modeset, all other streams in the 895 * topology need a modeset. All platforms need to add the atomic state 896 * for all streams in the topology, since a modeset on one may require 897 * changing the MST link BW usage of the others, which in turn needs a 898 * recomputation of the corresponding CRTC states. 899 */ 900 static int 901 mst_connector_atomic_topology_check(struct intel_connector *connector, 902 struct intel_atomic_state *state) 903 { 904 struct intel_display *display = to_intel_display(connector); 905 struct drm_connector_list_iter connector_list_iter; 906 struct intel_connector *connector_iter; 907 int ret = 0; 908 909 if (!intel_connector_needs_modeset(state, &connector->base)) 910 return 0; 911 912 drm_connector_list_iter_begin(display->drm, &connector_list_iter); 913 for_each_intel_connector_iter(connector_iter, &connector_list_iter) { 914 struct intel_digital_connector_state *conn_iter_state; 915 struct intel_crtc_state *crtc_state; 916 struct intel_crtc *crtc; 917 918 if (connector_iter->mst_port != connector->mst_port || 919 connector_iter == connector) 920 continue; 921 922 conn_iter_state = intel_atomic_get_digital_connector_state(state, 923 connector_iter); 924 if (IS_ERR(conn_iter_state)) { 925 ret = PTR_ERR(conn_iter_state); 926 break; 927 } 928 929 if (!conn_iter_state->base.crtc) 930 continue; 931 932 crtc = to_intel_crtc(conn_iter_state->base.crtc); 933 crtc_state = intel_atomic_get_crtc_state(&state->base, crtc); 934 if (IS_ERR(crtc_state)) { 935 ret = PTR_ERR(crtc_state); 936 break; 937 } 938 939 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base); 940 if (ret) 941 break; 942 crtc_state->uapi.mode_changed = true; 943 } 944 drm_connector_list_iter_end(&connector_list_iter); 945 946 return ret; 947 } 948 949 static int 950 mst_connector_atomic_check(struct drm_connector *connector, 951 struct drm_atomic_state *_state) 952 { 953 struct intel_atomic_state *state = to_intel_atomic_state(_state); 954 struct intel_connector *intel_connector = 955 to_intel_connector(connector); 956 int ret; 957 958 ret = intel_digital_connector_atomic_check(connector, &state->base); 959 if (ret) 960 return ret; 961 962 ret = mst_connector_atomic_topology_check(intel_connector, state); 963 if (ret) 964 return ret; 965 966 if (intel_connector_needs_modeset(state, connector)) { 967 ret = intel_dp_tunnel_atomic_check_state(state, 968 intel_connector->mst_port, 969 intel_connector); 970 if (ret) 971 return ret; 972 } 973 974 return drm_dp_atomic_release_time_slots(&state->base, 975 &intel_connector->mst_port->mst_mgr, 976 intel_connector->port); 977 } 978 979 static void mst_stream_disable(struct intel_atomic_state *state, 980 struct intel_encoder *encoder, 981 const struct intel_crtc_state *old_crtc_state, 982 const struct drm_connector_state *old_conn_state) 983 { 984 struct intel_display *display = to_intel_display(state); 985 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder); 986 struct intel_dp *intel_dp = to_primary_dp(encoder); 987 struct intel_connector *connector = 988 to_intel_connector(old_conn_state->connector); 989 990 drm_dbg_kms(display->drm, "active links %d\n", 991 intel_dp->active_mst_links); 992 993 if (intel_dp->active_mst_links == 1) 994 intel_dp->link_trained = false; 995 996 intel_hdcp_disable(intel_mst->connector); 997 998 intel_dp_sink_disable_decompression(state, connector, old_crtc_state); 999 } 1000 1001 static void mst_stream_post_disable(struct intel_atomic_state *state, 1002 struct intel_encoder *encoder, 1003 const struct intel_crtc_state *old_crtc_state, 1004 const struct drm_connector_state *old_conn_state) 1005 { 1006 struct intel_display *display = to_intel_display(encoder); 1007 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder); 1008 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1009 struct intel_dp *intel_dp = to_primary_dp(encoder); 1010 struct intel_connector *connector = 1011 to_intel_connector(old_conn_state->connector); 1012 struct drm_dp_mst_topology_state *old_mst_state = 1013 drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst_mgr); 1014 struct drm_dp_mst_topology_state *new_mst_state = 1015 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr); 1016 const struct drm_dp_mst_atomic_payload *old_payload = 1017 drm_atomic_get_mst_payload_state(old_mst_state, connector->port); 1018 struct drm_dp_mst_atomic_payload *new_payload = 1019 drm_atomic_get_mst_payload_state(new_mst_state, connector->port); 1020 struct intel_crtc *pipe_crtc; 1021 bool last_mst_stream; 1022 int i; 1023 1024 intel_dp->active_mst_links--; 1025 last_mst_stream = intel_dp->active_mst_links == 0; 1026 drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream && 1027 !intel_dp_mst_is_master_trans(old_crtc_state)); 1028 1029 for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) { 1030 const struct intel_crtc_state *old_pipe_crtc_state = 1031 intel_atomic_get_old_crtc_state(state, pipe_crtc); 1032 1033 intel_crtc_vblank_off(old_pipe_crtc_state); 1034 } 1035 1036 intel_disable_transcoder(old_crtc_state); 1037 1038 drm_dp_remove_payload_part1(&intel_dp->mst_mgr, new_mst_state, new_payload); 1039 1040 intel_ddi_clear_act_sent(encoder, old_crtc_state); 1041 1042 intel_de_rmw(display, 1043 TRANS_DDI_FUNC_CTL(display, old_crtc_state->cpu_transcoder), 1044 TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0); 1045 1046 intel_ddi_wait_for_act_sent(encoder, old_crtc_state); 1047 drm_dp_check_act_status(&intel_dp->mst_mgr); 1048 1049 drm_dp_remove_payload_part2(&intel_dp->mst_mgr, new_mst_state, 1050 old_payload, new_payload); 1051 1052 intel_ddi_disable_transcoder_func(old_crtc_state); 1053 1054 for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) { 1055 const struct intel_crtc_state *old_pipe_crtc_state = 1056 intel_atomic_get_old_crtc_state(state, pipe_crtc); 1057 1058 intel_dsc_disable(old_pipe_crtc_state); 1059 1060 if (DISPLAY_VER(display) >= 9) 1061 skl_scaler_disable(old_pipe_crtc_state); 1062 else 1063 ilk_pfit_disable(old_pipe_crtc_state); 1064 } 1065 1066 /* 1067 * Power down mst path before disabling the port, otherwise we end 1068 * up getting interrupts from the sink upon detecting link loss. 1069 */ 1070 drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port, 1071 false); 1072 1073 /* 1074 * BSpec 4287: disable DIP after the transcoder is disabled and before 1075 * the transcoder clock select is set to none. 1076 */ 1077 intel_dp_set_infoframes(primary_encoder, false, old_crtc_state, NULL); 1078 /* 1079 * From TGL spec: "If multi-stream slave transcoder: Configure 1080 * Transcoder Clock Select to direct no clock to the transcoder" 1081 * 1082 * From older GENs spec: "Configure Transcoder Clock Select to direct 1083 * no clock to the transcoder" 1084 */ 1085 if (DISPLAY_VER(display) < 12 || !last_mst_stream) 1086 intel_ddi_disable_transcoder_clock(old_crtc_state); 1087 1088 1089 intel_mst->connector = NULL; 1090 if (last_mst_stream) 1091 primary_encoder->post_disable(state, primary_encoder, 1092 old_crtc_state, NULL); 1093 1094 drm_dbg_kms(display->drm, "active links %d\n", 1095 intel_dp->active_mst_links); 1096 } 1097 1098 static void mst_stream_post_pll_disable(struct intel_atomic_state *state, 1099 struct intel_encoder *encoder, 1100 const struct intel_crtc_state *old_crtc_state, 1101 const struct drm_connector_state *old_conn_state) 1102 { 1103 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1104 struct intel_dp *intel_dp = to_primary_dp(encoder); 1105 1106 if (intel_dp->active_mst_links == 0 && 1107 primary_encoder->post_pll_disable) 1108 primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state); 1109 } 1110 1111 static void mst_stream_pre_pll_enable(struct intel_atomic_state *state, 1112 struct intel_encoder *encoder, 1113 const struct intel_crtc_state *pipe_config, 1114 const struct drm_connector_state *conn_state) 1115 { 1116 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1117 struct intel_dp *intel_dp = to_primary_dp(encoder); 1118 1119 if (intel_dp->active_mst_links == 0) 1120 primary_encoder->pre_pll_enable(state, primary_encoder, 1121 pipe_config, NULL); 1122 else 1123 /* 1124 * The port PLL state needs to get updated for secondary 1125 * streams as for the primary stream. 1126 */ 1127 intel_ddi_update_active_dpll(state, primary_encoder, 1128 to_intel_crtc(pipe_config->uapi.crtc)); 1129 } 1130 1131 static bool intel_mst_probed_link_params_valid(struct intel_dp *intel_dp, 1132 int link_rate, int lane_count) 1133 { 1134 return intel_dp->link.mst_probed_rate == link_rate && 1135 intel_dp->link.mst_probed_lane_count == lane_count; 1136 } 1137 1138 static void intel_mst_set_probed_link_params(struct intel_dp *intel_dp, 1139 int link_rate, int lane_count) 1140 { 1141 intel_dp->link.mst_probed_rate = link_rate; 1142 intel_dp->link.mst_probed_lane_count = lane_count; 1143 } 1144 1145 static void intel_mst_reprobe_topology(struct intel_dp *intel_dp, 1146 const struct intel_crtc_state *crtc_state) 1147 { 1148 if (intel_mst_probed_link_params_valid(intel_dp, 1149 crtc_state->port_clock, crtc_state->lane_count)) 1150 return; 1151 1152 drm_dp_mst_topology_queue_probe(&intel_dp->mst_mgr); 1153 1154 intel_mst_set_probed_link_params(intel_dp, 1155 crtc_state->port_clock, crtc_state->lane_count); 1156 } 1157 1158 static void mst_stream_pre_enable(struct intel_atomic_state *state, 1159 struct intel_encoder *encoder, 1160 const struct intel_crtc_state *pipe_config, 1161 const struct drm_connector_state *conn_state) 1162 { 1163 struct intel_display *display = to_intel_display(state); 1164 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder); 1165 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1166 struct intel_dp *intel_dp = to_primary_dp(encoder); 1167 struct intel_connector *connector = 1168 to_intel_connector(conn_state->connector); 1169 struct drm_dp_mst_topology_state *mst_state = 1170 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr); 1171 int ret; 1172 bool first_mst_stream; 1173 1174 /* MST encoders are bound to a crtc, not to a connector, 1175 * force the mapping here for get_hw_state. 1176 */ 1177 connector->encoder = encoder; 1178 intel_mst->connector = connector; 1179 first_mst_stream = intel_dp->active_mst_links == 0; 1180 drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream && 1181 !intel_dp_mst_is_master_trans(pipe_config)); 1182 1183 drm_dbg_kms(display->drm, "active links %d\n", 1184 intel_dp->active_mst_links); 1185 1186 if (first_mst_stream) 1187 intel_dp_set_power(intel_dp, DP_SET_POWER_D0); 1188 1189 drm_dp_send_power_updown_phy(&intel_dp->mst_mgr, connector->port, true); 1190 1191 intel_dp_sink_enable_decompression(state, connector, pipe_config); 1192 1193 if (first_mst_stream) { 1194 primary_encoder->pre_enable(state, primary_encoder, 1195 pipe_config, NULL); 1196 1197 intel_mst_reprobe_topology(intel_dp, pipe_config); 1198 } 1199 1200 intel_dp->active_mst_links++; 1201 1202 ret = drm_dp_add_payload_part1(&intel_dp->mst_mgr, mst_state, 1203 drm_atomic_get_mst_payload_state(mst_state, connector->port)); 1204 if (ret < 0) 1205 intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config); 1206 1207 /* 1208 * Before Gen 12 this is not done as part of 1209 * primary_encoder->pre_enable() and should be done here. For 1210 * Gen 12+ the step in which this should be done is different for the 1211 * first MST stream, so it's done on the DDI for the first stream and 1212 * here for the following ones. 1213 */ 1214 if (DISPLAY_VER(display) < 12 || !first_mst_stream) 1215 intel_ddi_enable_transcoder_clock(encoder, pipe_config); 1216 1217 if (DISPLAY_VER(display) >= 13 && !first_mst_stream) 1218 intel_ddi_config_transcoder_func(encoder, pipe_config); 1219 1220 intel_dsc_dp_pps_write(primary_encoder, pipe_config); 1221 intel_ddi_set_dp_msa(pipe_config, conn_state); 1222 } 1223 1224 static void enable_bs_jitter_was(const struct intel_crtc_state *crtc_state) 1225 { 1226 struct intel_display *display = to_intel_display(crtc_state); 1227 struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev); 1228 u32 clear = 0; 1229 u32 set = 0; 1230 1231 if (!IS_ALDERLAKE_P(i915)) 1232 return; 1233 1234 if (!IS_DISPLAY_STEP(display, STEP_D0, STEP_FOREVER)) 1235 return; 1236 1237 /* Wa_14013163432:adlp */ 1238 if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state)) 1239 set |= DP_MST_FEC_BS_JITTER_WA(crtc_state->cpu_transcoder); 1240 1241 /* Wa_14014143976:adlp */ 1242 if (IS_DISPLAY_STEP(display, STEP_E0, STEP_FOREVER)) { 1243 if (intel_dp_is_uhbr(crtc_state)) 1244 set |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder); 1245 else if (crtc_state->fec_enable) 1246 clear |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder); 1247 1248 if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state)) 1249 set |= DP_MST_DPT_DPTP_ALIGN_WA(crtc_state->cpu_transcoder); 1250 } 1251 1252 if (!clear && !set) 1253 return; 1254 1255 intel_de_rmw(display, CHICKEN_MISC_3, clear, set); 1256 } 1257 1258 static void mst_stream_enable(struct intel_atomic_state *state, 1259 struct intel_encoder *encoder, 1260 const struct intel_crtc_state *pipe_config, 1261 const struct drm_connector_state *conn_state) 1262 { 1263 struct intel_display *display = to_intel_display(encoder); 1264 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1265 struct intel_dp *intel_dp = to_primary_dp(encoder); 1266 struct intel_connector *connector = to_intel_connector(conn_state->connector); 1267 struct drm_dp_mst_topology_state *mst_state = 1268 drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst_mgr); 1269 enum transcoder trans = pipe_config->cpu_transcoder; 1270 bool first_mst_stream = intel_dp->active_mst_links == 1; 1271 struct intel_crtc *pipe_crtc; 1272 int ret, i; 1273 1274 drm_WARN_ON(display->drm, pipe_config->has_pch_encoder); 1275 1276 if (intel_dp_is_uhbr(pipe_config)) { 1277 const struct drm_display_mode *adjusted_mode = 1278 &pipe_config->hw.adjusted_mode; 1279 u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock); 1280 1281 intel_de_write(display, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder), 1282 TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24)); 1283 intel_de_write(display, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder), 1284 TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff)); 1285 } 1286 1287 enable_bs_jitter_was(pipe_config); 1288 1289 intel_ddi_enable_transcoder_func(encoder, pipe_config); 1290 1291 intel_ddi_clear_act_sent(encoder, pipe_config); 1292 1293 intel_de_rmw(display, TRANS_DDI_FUNC_CTL(display, trans), 0, 1294 TRANS_DDI_DP_VC_PAYLOAD_ALLOC); 1295 1296 drm_dbg_kms(display->drm, "active links %d\n", 1297 intel_dp->active_mst_links); 1298 1299 intel_ddi_wait_for_act_sent(encoder, pipe_config); 1300 drm_dp_check_act_status(&intel_dp->mst_mgr); 1301 1302 if (first_mst_stream) 1303 intel_ddi_wait_for_fec_status(encoder, pipe_config, true); 1304 1305 ret = drm_dp_add_payload_part2(&intel_dp->mst_mgr, 1306 drm_atomic_get_mst_payload_state(mst_state, 1307 connector->port)); 1308 if (ret < 0) 1309 intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config); 1310 1311 if (DISPLAY_VER(display) >= 12) 1312 intel_de_rmw(display, CHICKEN_TRANS(display, trans), 1313 FECSTALL_DIS_DPTSTREAM_DPTTG, 1314 pipe_config->fec_enable ? FECSTALL_DIS_DPTSTREAM_DPTTG : 0); 1315 1316 intel_audio_sdp_split_update(pipe_config); 1317 1318 intel_enable_transcoder(pipe_config); 1319 1320 for_each_pipe_crtc_modeset_enable(display, pipe_crtc, pipe_config, i) { 1321 const struct intel_crtc_state *pipe_crtc_state = 1322 intel_atomic_get_new_crtc_state(state, pipe_crtc); 1323 1324 intel_crtc_vblank_on(pipe_crtc_state); 1325 } 1326 1327 intel_hdcp_enable(state, encoder, pipe_config, conn_state); 1328 } 1329 1330 static bool mst_stream_get_hw_state(struct intel_encoder *encoder, 1331 enum pipe *pipe) 1332 { 1333 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder); 1334 *pipe = intel_mst->pipe; 1335 if (intel_mst->connector) 1336 return true; 1337 return false; 1338 } 1339 1340 static void mst_stream_get_config(struct intel_encoder *encoder, 1341 struct intel_crtc_state *pipe_config) 1342 { 1343 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1344 1345 primary_encoder->get_config(primary_encoder, pipe_config); 1346 } 1347 1348 static bool mst_stream_initial_fastset_check(struct intel_encoder *encoder, 1349 struct intel_crtc_state *crtc_state) 1350 { 1351 struct intel_encoder *primary_encoder = to_primary_encoder(encoder); 1352 1353 return intel_dp_initial_fastset_check(primary_encoder, crtc_state); 1354 } 1355 1356 static int mst_connector_get_ddc_modes(struct drm_connector *connector) 1357 { 1358 struct intel_display *display = to_intel_display(connector->dev); 1359 struct intel_connector *intel_connector = to_intel_connector(connector); 1360 struct intel_dp *intel_dp = intel_connector->mst_port; 1361 const struct drm_edid *drm_edid; 1362 int ret; 1363 1364 if (drm_connector_is_unregistered(connector)) 1365 return intel_connector_update_modes(connector, NULL); 1366 1367 if (!intel_display_driver_check_access(display)) 1368 return drm_edid_connector_add_modes(connector); 1369 1370 drm_edid = drm_dp_mst_edid_read(connector, &intel_dp->mst_mgr, intel_connector->port); 1371 1372 ret = intel_connector_update_modes(connector, drm_edid); 1373 1374 drm_edid_free(drm_edid); 1375 1376 return ret; 1377 } 1378 1379 static int 1380 mst_connector_late_register(struct drm_connector *connector) 1381 { 1382 struct intel_connector *intel_connector = to_intel_connector(connector); 1383 int ret; 1384 1385 ret = drm_dp_mst_connector_late_register(connector, 1386 intel_connector->port); 1387 if (ret < 0) 1388 return ret; 1389 1390 ret = intel_connector_register(connector); 1391 if (ret < 0) 1392 drm_dp_mst_connector_early_unregister(connector, 1393 intel_connector->port); 1394 1395 return ret; 1396 } 1397 1398 static void 1399 mst_connector_early_unregister(struct drm_connector *connector) 1400 { 1401 struct intel_connector *intel_connector = to_intel_connector(connector); 1402 1403 intel_connector_unregister(connector); 1404 drm_dp_mst_connector_early_unregister(connector, 1405 intel_connector->port); 1406 } 1407 1408 static const struct drm_connector_funcs mst_connector_funcs = { 1409 .fill_modes = drm_helper_probe_single_connector_modes, 1410 .atomic_get_property = intel_digital_connector_atomic_get_property, 1411 .atomic_set_property = intel_digital_connector_atomic_set_property, 1412 .late_register = mst_connector_late_register, 1413 .early_unregister = mst_connector_early_unregister, 1414 .destroy = intel_connector_destroy, 1415 .atomic_destroy_state = drm_atomic_helper_connector_destroy_state, 1416 .atomic_duplicate_state = intel_digital_connector_duplicate_state, 1417 }; 1418 1419 static int mst_connector_get_modes(struct drm_connector *connector) 1420 { 1421 return mst_connector_get_ddc_modes(connector); 1422 } 1423 1424 static int 1425 mst_connector_mode_valid_ctx(struct drm_connector *connector, 1426 struct drm_display_mode *mode, 1427 struct drm_modeset_acquire_ctx *ctx, 1428 enum drm_mode_status *status) 1429 { 1430 struct intel_display *display = to_intel_display(connector->dev); 1431 struct drm_i915_private *dev_priv = to_i915(connector->dev); 1432 struct intel_connector *intel_connector = to_intel_connector(connector); 1433 struct intel_dp *intel_dp = intel_connector->mst_port; 1434 struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst_mgr; 1435 struct drm_dp_mst_port *port = intel_connector->port; 1436 const int min_bpp = 18; 1437 int max_dotclk = display->cdclk.max_dotclk_freq; 1438 int max_rate, mode_rate, max_lanes, max_link_clock; 1439 int ret; 1440 bool dsc = false; 1441 u16 dsc_max_compressed_bpp = 0; 1442 u8 dsc_slice_count = 0; 1443 int target_clock = mode->clock; 1444 int num_joined_pipes; 1445 1446 if (drm_connector_is_unregistered(connector)) { 1447 *status = MODE_ERROR; 1448 return 0; 1449 } 1450 1451 *status = intel_cpu_transcoder_mode_valid(dev_priv, mode); 1452 if (*status != MODE_OK) 1453 return 0; 1454 1455 if (mode->flags & DRM_MODE_FLAG_DBLCLK) { 1456 *status = MODE_H_ILLEGAL; 1457 return 0; 1458 } 1459 1460 if (mode->clock < 10000) { 1461 *status = MODE_CLOCK_LOW; 1462 return 0; 1463 } 1464 1465 max_link_clock = intel_dp_max_link_rate(intel_dp); 1466 max_lanes = intel_dp_max_lane_count(intel_dp); 1467 1468 max_rate = intel_dp_max_link_data_rate(intel_dp, 1469 max_link_clock, max_lanes); 1470 mode_rate = intel_dp_link_required(mode->clock, min_bpp); 1471 1472 /* 1473 * TODO: 1474 * - Also check if compression would allow for the mode 1475 * - Calculate the overhead using drm_dp_bw_overhead() / 1476 * drm_dp_bw_channel_coding_efficiency(), similarly to the 1477 * compute config code, as drm_dp_calc_pbn_mode() doesn't 1478 * account with all the overheads. 1479 * - Check here and during compute config the BW reported by 1480 * DFP_Link_Available_Payload_Bandwidth_Number (or the 1481 * corresponding link capabilities of the sink) in case the 1482 * stream is uncompressed for it by the last branch device. 1483 */ 1484 num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, intel_connector, 1485 mode->hdisplay, target_clock); 1486 max_dotclk *= num_joined_pipes; 1487 1488 ret = drm_modeset_lock(&mgr->base.lock, ctx); 1489 if (ret) 1490 return ret; 1491 1492 if (mode_rate > max_rate || mode->clock > max_dotclk || 1493 drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) { 1494 *status = MODE_CLOCK_HIGH; 1495 return 0; 1496 } 1497 1498 if (intel_dp_has_dsc(intel_connector)) { 1499 /* 1500 * TBD pass the connector BPC, 1501 * for now U8_MAX so that max BPC on that platform would be picked 1502 */ 1503 int pipe_bpp = intel_dp_dsc_compute_max_bpp(intel_connector, U8_MAX); 1504 1505 if (drm_dp_sink_supports_fec(intel_connector->dp.fec_capability)) { 1506 dsc_max_compressed_bpp = 1507 intel_dp_dsc_get_max_compressed_bpp(display, 1508 max_link_clock, 1509 max_lanes, 1510 target_clock, 1511 mode->hdisplay, 1512 num_joined_pipes, 1513 INTEL_OUTPUT_FORMAT_RGB, 1514 pipe_bpp, 64); 1515 dsc_slice_count = 1516 intel_dp_dsc_get_slice_count(intel_connector, 1517 target_clock, 1518 mode->hdisplay, 1519 num_joined_pipes); 1520 } 1521 1522 dsc = dsc_max_compressed_bpp && dsc_slice_count; 1523 } 1524 1525 if (intel_dp_joiner_needs_dsc(display, num_joined_pipes) && !dsc) { 1526 *status = MODE_CLOCK_HIGH; 1527 return 0; 1528 } 1529 1530 if (mode_rate > max_rate && !dsc) { 1531 *status = MODE_CLOCK_HIGH; 1532 return 0; 1533 } 1534 1535 *status = intel_mode_valid_max_plane_size(dev_priv, mode, num_joined_pipes); 1536 return 0; 1537 } 1538 1539 static struct drm_encoder * 1540 mst_connector_atomic_best_encoder(struct drm_connector *connector, 1541 struct drm_atomic_state *state) 1542 { 1543 struct drm_connector_state *connector_state = drm_atomic_get_new_connector_state(state, 1544 connector); 1545 struct intel_connector *intel_connector = to_intel_connector(connector); 1546 struct intel_dp *intel_dp = intel_connector->mst_port; 1547 struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc); 1548 1549 return &intel_dp->mst_encoders[crtc->pipe]->base.base; 1550 } 1551 1552 static int 1553 mst_connector_detect_ctx(struct drm_connector *connector, 1554 struct drm_modeset_acquire_ctx *ctx, bool force) 1555 { 1556 struct intel_display *display = to_intel_display(connector->dev); 1557 struct intel_connector *intel_connector = to_intel_connector(connector); 1558 struct intel_dp *intel_dp = intel_connector->mst_port; 1559 1560 if (!intel_display_device_enabled(display)) 1561 return connector_status_disconnected; 1562 1563 if (drm_connector_is_unregistered(connector)) 1564 return connector_status_disconnected; 1565 1566 if (!intel_display_driver_check_access(display)) 1567 return connector->status; 1568 1569 intel_dp_flush_connector_commits(intel_connector); 1570 1571 return drm_dp_mst_detect_port(connector, ctx, &intel_dp->mst_mgr, 1572 intel_connector->port); 1573 } 1574 1575 static const struct drm_connector_helper_funcs mst_connector_helper_funcs = { 1576 .get_modes = mst_connector_get_modes, 1577 .mode_valid_ctx = mst_connector_mode_valid_ctx, 1578 .atomic_best_encoder = mst_connector_atomic_best_encoder, 1579 .atomic_check = mst_connector_atomic_check, 1580 .detect_ctx = mst_connector_detect_ctx, 1581 }; 1582 1583 static void mst_stream_encoder_destroy(struct drm_encoder *encoder) 1584 { 1585 struct intel_dp_mst_encoder *intel_mst = enc_to_mst(to_intel_encoder(encoder)); 1586 1587 drm_encoder_cleanup(encoder); 1588 kfree(intel_mst); 1589 } 1590 1591 static const struct drm_encoder_funcs mst_stream_encoder_funcs = { 1592 .destroy = mst_stream_encoder_destroy, 1593 }; 1594 1595 static bool mst_connector_get_hw_state(struct intel_connector *connector) 1596 { 1597 /* This is the MST stream encoder set in ->pre_enable, if any */ 1598 struct intel_encoder *encoder = intel_attached_encoder(connector); 1599 enum pipe pipe; 1600 1601 if (!encoder || !connector->base.state->crtc) 1602 return false; 1603 1604 return encoder->get_hw_state(encoder, &pipe); 1605 } 1606 1607 static int mst_topology_add_connector_properties(struct intel_dp *intel_dp, 1608 struct drm_connector *connector, 1609 const char *pathprop) 1610 { 1611 struct intel_display *display = to_intel_display(intel_dp); 1612 1613 drm_object_attach_property(&connector->base, 1614 display->drm->mode_config.path_property, 0); 1615 drm_object_attach_property(&connector->base, 1616 display->drm->mode_config.tile_property, 0); 1617 1618 intel_attach_force_audio_property(connector); 1619 intel_attach_broadcast_rgb_property(connector); 1620 1621 /* 1622 * Reuse the prop from the SST connector because we're 1623 * not allowed to create new props after device registration. 1624 */ 1625 connector->max_bpc_property = 1626 intel_dp->attached_connector->base.max_bpc_property; 1627 if (connector->max_bpc_property) 1628 drm_connector_attach_max_bpc_property(connector, 6, 12); 1629 1630 return drm_connector_set_path_property(connector, pathprop); 1631 } 1632 1633 static void 1634 intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp *intel_dp, 1635 struct intel_connector *connector) 1636 { 1637 u8 dpcd_caps[DP_RECEIVER_CAP_SIZE]; 1638 1639 if (!connector->dp.dsc_decompression_aux) 1640 return; 1641 1642 if (drm_dp_read_dpcd_caps(connector->dp.dsc_decompression_aux, dpcd_caps) < 0) 1643 return; 1644 1645 intel_dp_get_dsc_sink_cap(dpcd_caps[DP_DPCD_REV], connector); 1646 } 1647 1648 static bool detect_dsc_hblank_expansion_quirk(const struct intel_connector *connector) 1649 { 1650 struct intel_display *display = to_intel_display(connector); 1651 struct drm_dp_aux *aux = connector->dp.dsc_decompression_aux; 1652 struct drm_dp_desc desc; 1653 u8 dpcd[DP_RECEIVER_CAP_SIZE]; 1654 1655 if (!aux) 1656 return false; 1657 1658 /* 1659 * A logical port's OUI (at least for affected sinks) is all 0, so 1660 * instead of that the parent port's OUI is used for identification. 1661 */ 1662 if (drm_dp_mst_port_is_logical(connector->port)) { 1663 aux = drm_dp_mst_aux_for_parent(connector->port); 1664 if (!aux) 1665 aux = &connector->mst_port->aux; 1666 } 1667 1668 if (drm_dp_read_dpcd_caps(aux, dpcd) < 0) 1669 return false; 1670 1671 if (drm_dp_read_desc(aux, &desc, drm_dp_is_branch(dpcd)) < 0) 1672 return false; 1673 1674 if (!drm_dp_has_quirk(&desc, 1675 DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC)) 1676 return false; 1677 1678 /* 1679 * UHBR (MST sink) devices requiring this quirk don't advertise the 1680 * HBLANK expansion support. Presuming that they perform HBLANK 1681 * expansion internally, or are affected by this issue on modes with a 1682 * short HBLANK for other reasons. 1683 */ 1684 if (!drm_dp_128b132b_supported(dpcd) && 1685 !(dpcd[DP_RECEIVE_PORT_0_CAP_0] & DP_HBLANK_EXPANSION_CAPABLE)) 1686 return false; 1687 1688 drm_dbg_kms(display->drm, 1689 "[CONNECTOR:%d:%s] DSC HBLANK expansion quirk detected\n", 1690 connector->base.base.id, connector->base.name); 1691 1692 return true; 1693 } 1694 1695 static struct drm_connector * 1696 mst_topology_add_connector(struct drm_dp_mst_topology_mgr *mgr, 1697 struct drm_dp_mst_port *port, 1698 const char *pathprop) 1699 { 1700 struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr); 1701 struct intel_display *display = to_intel_display(intel_dp); 1702 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 1703 struct intel_connector *intel_connector; 1704 struct drm_connector *connector; 1705 enum pipe pipe; 1706 int ret; 1707 1708 intel_connector = intel_connector_alloc(); 1709 if (!intel_connector) 1710 return NULL; 1711 1712 connector = &intel_connector->base; 1713 1714 intel_connector->get_hw_state = mst_connector_get_hw_state; 1715 intel_connector->sync_state = intel_dp_connector_sync_state; 1716 intel_connector->mst_port = intel_dp; 1717 intel_connector->port = port; 1718 drm_dp_mst_get_port_malloc(port); 1719 1720 intel_dp_init_modeset_retry_work(intel_connector); 1721 1722 ret = drm_connector_dynamic_init(display->drm, connector, &mst_connector_funcs, 1723 DRM_MODE_CONNECTOR_DisplayPort, NULL); 1724 if (ret) { 1725 drm_dp_mst_put_port_malloc(port); 1726 intel_connector_free(intel_connector); 1727 return NULL; 1728 } 1729 1730 intel_connector->dp.dsc_decompression_aux = drm_dp_mst_dsc_aux_for_port(port); 1731 intel_dp_mst_read_decompression_port_dsc_caps(intel_dp, intel_connector); 1732 intel_connector->dp.dsc_hblank_expansion_quirk = 1733 detect_dsc_hblank_expansion_quirk(intel_connector); 1734 1735 drm_connector_helper_add(connector, &mst_connector_helper_funcs); 1736 1737 for_each_pipe(display, pipe) { 1738 struct drm_encoder *enc = 1739 &intel_dp->mst_encoders[pipe]->base.base; 1740 1741 ret = drm_connector_attach_encoder(&intel_connector->base, enc); 1742 if (ret) 1743 goto err; 1744 } 1745 1746 ret = mst_topology_add_connector_properties(intel_dp, connector, pathprop); 1747 if (ret) 1748 goto err; 1749 1750 ret = intel_dp_hdcp_init(dig_port, intel_connector); 1751 if (ret) 1752 drm_dbg_kms(display->drm, "[%s:%d] HDCP MST init failed, skipping.\n", 1753 connector->name, connector->base.id); 1754 1755 return connector; 1756 1757 err: 1758 drm_connector_cleanup(connector); 1759 return NULL; 1760 } 1761 1762 static void 1763 mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr) 1764 { 1765 struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst_mgr); 1766 1767 intel_hpd_trigger_irq(dp_to_dig_port(intel_dp)); 1768 } 1769 1770 static const struct drm_dp_mst_topology_cbs mst_topology_cbs = { 1771 .add_connector = mst_topology_add_connector, 1772 .poll_hpd_irq = mst_topology_poll_hpd_irq, 1773 }; 1774 1775 /* Create a fake encoder for an individual MST stream */ 1776 static struct intel_dp_mst_encoder * 1777 mst_stream_encoder_create(struct intel_digital_port *dig_port, enum pipe pipe) 1778 { 1779 struct intel_display *display = to_intel_display(dig_port); 1780 struct intel_encoder *primary_encoder = &dig_port->base; 1781 struct intel_dp_mst_encoder *intel_mst; 1782 struct intel_encoder *encoder; 1783 1784 intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL); 1785 1786 if (!intel_mst) 1787 return NULL; 1788 1789 intel_mst->pipe = pipe; 1790 encoder = &intel_mst->base; 1791 intel_mst->primary = dig_port; 1792 1793 drm_encoder_init(display->drm, &encoder->base, &mst_stream_encoder_funcs, 1794 DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe)); 1795 1796 encoder->type = INTEL_OUTPUT_DP_MST; 1797 encoder->power_domain = primary_encoder->power_domain; 1798 encoder->port = primary_encoder->port; 1799 encoder->cloneable = 0; 1800 /* 1801 * This is wrong, but broken userspace uses the intersection 1802 * of possible_crtcs of all the encoders of a given connector 1803 * to figure out which crtcs can drive said connector. What 1804 * should be used instead is the union of possible_crtcs. 1805 * To keep such userspace functioning we must misconfigure 1806 * this to make sure the intersection is not empty :( 1807 */ 1808 encoder->pipe_mask = ~0; 1809 1810 encoder->compute_config = mst_stream_compute_config; 1811 encoder->compute_config_late = mst_stream_compute_config_late; 1812 encoder->disable = mst_stream_disable; 1813 encoder->post_disable = mst_stream_post_disable; 1814 encoder->post_pll_disable = mst_stream_post_pll_disable; 1815 encoder->update_pipe = intel_ddi_update_pipe; 1816 encoder->pre_pll_enable = mst_stream_pre_pll_enable; 1817 encoder->pre_enable = mst_stream_pre_enable; 1818 encoder->enable = mst_stream_enable; 1819 encoder->audio_enable = intel_audio_codec_enable; 1820 encoder->audio_disable = intel_audio_codec_disable; 1821 encoder->get_hw_state = mst_stream_get_hw_state; 1822 encoder->get_config = mst_stream_get_config; 1823 encoder->initial_fastset_check = mst_stream_initial_fastset_check; 1824 1825 return intel_mst; 1826 1827 } 1828 1829 /* Create the fake encoders for MST streams */ 1830 static bool 1831 mst_stream_encoders_create(struct intel_digital_port *dig_port) 1832 { 1833 struct intel_display *display = to_intel_display(dig_port); 1834 struct intel_dp *intel_dp = &dig_port->dp; 1835 enum pipe pipe; 1836 1837 for_each_pipe(display, pipe) 1838 intel_dp->mst_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe); 1839 return true; 1840 } 1841 1842 int 1843 intel_dp_mst_encoder_active_links(struct intel_digital_port *dig_port) 1844 { 1845 return dig_port->dp.active_mst_links; 1846 } 1847 1848 int 1849 intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id) 1850 { 1851 struct intel_display *display = to_intel_display(dig_port); 1852 struct intel_dp *intel_dp = &dig_port->dp; 1853 enum port port = dig_port->base.port; 1854 int ret; 1855 1856 if (!HAS_DP_MST(display) || intel_dp_is_edp(intel_dp)) 1857 return 0; 1858 1859 if (DISPLAY_VER(display) < 12 && port == PORT_A) 1860 return 0; 1861 1862 if (DISPLAY_VER(display) < 11 && port == PORT_E) 1863 return 0; 1864 1865 intel_dp->mst_mgr.cbs = &mst_topology_cbs; 1866 1867 /* create encoders */ 1868 mst_stream_encoders_create(dig_port); 1869 ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst_mgr, display->drm, 1870 &intel_dp->aux, 16, 3, conn_base_id); 1871 if (ret) { 1872 intel_dp->mst_mgr.cbs = NULL; 1873 return ret; 1874 } 1875 1876 return 0; 1877 } 1878 1879 bool intel_dp_mst_source_support(struct intel_dp *intel_dp) 1880 { 1881 return intel_dp->mst_mgr.cbs; 1882 } 1883 1884 void 1885 intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port) 1886 { 1887 struct intel_dp *intel_dp = &dig_port->dp; 1888 1889 if (!intel_dp_mst_source_support(intel_dp)) 1890 return; 1891 1892 drm_dp_mst_topology_mgr_destroy(&intel_dp->mst_mgr); 1893 /* encoders will get killed by normal cleanup */ 1894 1895 intel_dp->mst_mgr.cbs = NULL; 1896 } 1897 1898 bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state) 1899 { 1900 return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder; 1901 } 1902 1903 bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state) 1904 { 1905 return crtc_state->mst_master_transcoder != INVALID_TRANSCODER && 1906 crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder; 1907 } 1908 1909 /** 1910 * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector 1911 * @state: atomic state 1912 * @connector: connector to add the state for 1913 * @crtc: the CRTC @connector is attached to 1914 * 1915 * Add the MST topology state for @connector to @state. 1916 * 1917 * Returns 0 on success, negative error code on failure. 1918 */ 1919 static int 1920 intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state, 1921 struct intel_connector *connector, 1922 struct intel_crtc *crtc) 1923 { 1924 struct drm_dp_mst_topology_state *mst_state; 1925 1926 if (!connector->mst_port) 1927 return 0; 1928 1929 mst_state = drm_atomic_get_mst_topology_state(&state->base, 1930 &connector->mst_port->mst_mgr); 1931 if (IS_ERR(mst_state)) 1932 return PTR_ERR(mst_state); 1933 1934 mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base); 1935 1936 return 0; 1937 } 1938 1939 /** 1940 * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC 1941 * @state: atomic state 1942 * @crtc: CRTC to add the state for 1943 * 1944 * Add the MST topology state for @crtc to @state. 1945 * 1946 * Returns 0 on success, negative error code on failure. 1947 */ 1948 int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state, 1949 struct intel_crtc *crtc) 1950 { 1951 struct drm_connector *_connector; 1952 struct drm_connector_state *conn_state; 1953 int i; 1954 1955 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) { 1956 struct intel_connector *connector = to_intel_connector(_connector); 1957 int ret; 1958 1959 if (conn_state->crtc != &crtc->base) 1960 continue; 1961 1962 ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc); 1963 if (ret) 1964 return ret; 1965 } 1966 1967 return 0; 1968 } 1969 1970 static struct intel_connector * 1971 get_connector_in_state_for_crtc(struct intel_atomic_state *state, 1972 const struct intel_crtc *crtc) 1973 { 1974 struct drm_connector_state *old_conn_state; 1975 struct drm_connector_state *new_conn_state; 1976 struct drm_connector *_connector; 1977 int i; 1978 1979 for_each_oldnew_connector_in_state(&state->base, _connector, 1980 old_conn_state, new_conn_state, i) { 1981 struct intel_connector *connector = 1982 to_intel_connector(_connector); 1983 1984 if (old_conn_state->crtc == &crtc->base || 1985 new_conn_state->crtc == &crtc->base) 1986 return connector; 1987 } 1988 1989 return NULL; 1990 } 1991 1992 /** 1993 * intel_dp_mst_crtc_needs_modeset - check if changes in topology need to modeset the given CRTC 1994 * @state: atomic state 1995 * @crtc: CRTC for which to check the modeset requirement 1996 * 1997 * Check if any change in a MST topology requires a forced modeset on @crtc in 1998 * this topology. One such change is enabling/disabling the DSC decompression 1999 * state in the first branch device's UFP DPCD as required by one CRTC, while 2000 * the other @crtc in the same topology is still active, requiring a full modeset 2001 * on @crtc. 2002 */ 2003 bool intel_dp_mst_crtc_needs_modeset(struct intel_atomic_state *state, 2004 struct intel_crtc *crtc) 2005 { 2006 const struct intel_connector *crtc_connector; 2007 const struct drm_connector_state *conn_state; 2008 const struct drm_connector *_connector; 2009 int i; 2010 2011 if (!intel_crtc_has_type(intel_atomic_get_new_crtc_state(state, crtc), 2012 INTEL_OUTPUT_DP_MST)) 2013 return false; 2014 2015 crtc_connector = get_connector_in_state_for_crtc(state, crtc); 2016 2017 if (!crtc_connector) 2018 /* None of the connectors in the topology needs modeset */ 2019 return false; 2020 2021 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) { 2022 const struct intel_connector *connector = 2023 to_intel_connector(_connector); 2024 const struct intel_crtc_state *new_crtc_state; 2025 const struct intel_crtc_state *old_crtc_state; 2026 struct intel_crtc *crtc_iter; 2027 2028 if (connector->mst_port != crtc_connector->mst_port || 2029 !conn_state->crtc) 2030 continue; 2031 2032 crtc_iter = to_intel_crtc(conn_state->crtc); 2033 2034 new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc_iter); 2035 old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc_iter); 2036 2037 if (!intel_crtc_needs_modeset(new_crtc_state)) 2038 continue; 2039 2040 if (old_crtc_state->dsc.compression_enable == 2041 new_crtc_state->dsc.compression_enable) 2042 continue; 2043 /* 2044 * Toggling the decompression flag because of this stream in 2045 * the first downstream branch device's UFP DPCD may reset the 2046 * whole branch device. To avoid the reset while other streams 2047 * are also active modeset the whole MST topology in this 2048 * case. 2049 */ 2050 if (connector->dp.dsc_decompression_aux == 2051 &connector->mst_port->aux) 2052 return true; 2053 } 2054 2055 return false; 2056 } 2057 2058 /** 2059 * intel_dp_mst_prepare_probe - Prepare an MST link for topology probing 2060 * @intel_dp: DP port object 2061 * 2062 * Prepare an MST link for topology probing, programming the target 2063 * link parameters to DPCD. This step is a requirement of the enumaration 2064 * of path resources during probing. 2065 */ 2066 void intel_dp_mst_prepare_probe(struct intel_dp *intel_dp) 2067 { 2068 int link_rate = intel_dp_max_link_rate(intel_dp); 2069 int lane_count = intel_dp_max_lane_count(intel_dp); 2070 u8 rate_select; 2071 u8 link_bw; 2072 2073 if (intel_dp->link_trained) 2074 return; 2075 2076 if (intel_mst_probed_link_params_valid(intel_dp, link_rate, lane_count)) 2077 return; 2078 2079 intel_dp_compute_rate(intel_dp, link_rate, &link_bw, &rate_select); 2080 2081 intel_dp_link_training_set_mode(intel_dp, link_rate, false); 2082 intel_dp_link_training_set_bw(intel_dp, link_bw, rate_select, lane_count, 2083 drm_dp_enhanced_frame_cap(intel_dp->dpcd)); 2084 2085 intel_mst_set_probed_link_params(intel_dp, link_rate, lane_count); 2086 } 2087 2088 /* 2089 * intel_dp_mst_verify_dpcd_state - verify the MST SW enabled state wrt. the DPCD 2090 * @intel_dp: DP port object 2091 * 2092 * Verify if @intel_dp's MST enabled SW state matches the corresponding DPCD 2093 * state. A long HPD pulse - not long enough to be detected as a disconnected 2094 * state - could've reset the DPCD state, which requires tearing 2095 * down/recreating the MST topology. 2096 * 2097 * Returns %true if the SW MST enabled and DPCD states match, %false 2098 * otherwise. 2099 */ 2100 bool intel_dp_mst_verify_dpcd_state(struct intel_dp *intel_dp) 2101 { 2102 struct intel_display *display = to_intel_display(intel_dp); 2103 struct intel_connector *connector = intel_dp->attached_connector; 2104 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 2105 struct intel_encoder *encoder = &dig_port->base; 2106 int ret; 2107 u8 val; 2108 2109 if (!intel_dp->is_mst) 2110 return true; 2111 2112 ret = drm_dp_dpcd_readb(intel_dp->mst_mgr.aux, DP_MSTM_CTRL, &val); 2113 2114 /* Adjust the expected register value for SST + SideBand. */ 2115 if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) { 2116 drm_dbg_kms(display->drm, 2117 "[CONNECTOR:%d:%s][ENCODER:%d:%s] MST mode got reset, removing topology (ret=%d, ctrl=0x%02x)\n", 2118 connector->base.base.id, connector->base.name, 2119 encoder->base.base.id, encoder->base.name, 2120 ret, val); 2121 2122 return false; 2123 } 2124 2125 return true; 2126 } 2127