1 /* 2 * Copyright © 2008 Intel Corporation 3 * 2014 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice (including the next 13 * paragraph) shall be included in all copies or substantial portions of the 14 * Software. 15 * 16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 22 * IN THE SOFTWARE. 
23 * 24 */ 25 26 #include <drm/drm_atomic.h> 27 #include <drm/drm_atomic_helper.h> 28 #include <drm/drm_edid.h> 29 #include <drm/drm_fixed.h> 30 #include <drm/drm_probe_helper.h> 31 32 #include "i915_drv.h" 33 #include "i915_reg.h" 34 #include "intel_atomic.h" 35 #include "intel_audio.h" 36 #include "intel_connector.h" 37 #include "intel_crtc.h" 38 #include "intel_ddi.h" 39 #include "intel_de.h" 40 #include "intel_display_driver.h" 41 #include "intel_display_types.h" 42 #include "intel_dp.h" 43 #include "intel_dp_hdcp.h" 44 #include "intel_dp_link_training.h" 45 #include "intel_dp_mst.h" 46 #include "intel_dp_test.h" 47 #include "intel_dp_tunnel.h" 48 #include "intel_dpio_phy.h" 49 #include "intel_hdcp.h" 50 #include "intel_hotplug.h" 51 #include "intel_link_bw.h" 52 #include "intel_pfit.h" 53 #include "intel_psr.h" 54 #include "intel_vdsc.h" 55 #include "skl_scaler.h" 56 57 /* 58 * DP MST (DisplayPort Multi-Stream Transport) 59 * 60 * MST support on the source depends on the platform and port. DP initialization 61 * sets up MST for each MST capable encoder. This will become the primary 62 * encoder for the port. 63 * 64 * MST initialization of each primary encoder creates MST stream encoders, one 65 * per pipe, and initializes the MST topology manager. The MST stream encoders 66 * are sometimes called "fake encoders", because they're virtual, not 67 * physical. Thus there are (number of MST capable ports) x (number of pipes) 68 * MST stream encoders in total. 69 * 70 * Decision to use MST for a sink happens at detect on the connector attached to 71 * the primary encoder, and this will not change while the sink is connected. We 72 * always use MST when possible, including for SST sinks with sideband messaging 73 * support. 74 * 75 * The connectors for the MST streams are added and removed dynamically by the 76 * topology manager. Their connection status is also determined by the topology 77 * manager. 
 *
 * On hardware, each transcoder may be associated with a single DDI
 * port. Multiple transcoders may be associated with the same DDI port only if
 * the port is in MST mode.
 *
 * On TGL+, all the transcoders streaming on the same DDI port will indicate a
 * primary transcoder; the TGL_DP_TP_CTL and TGL_DP_TP_STATUS registers are
 * relevant only on the primary transcoder. Prior to that, they are port
 * registers.
 */

/* From fake MST stream encoder to primary encoder */
static struct intel_encoder *to_primary_encoder(struct intel_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return &dig_port->base;
}

/* From fake MST stream encoder to primary DP */
static struct intel_dp *to_primary_dp(struct intel_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return &dig_port->dp;
}

/*
 * Return the maximum link bpp imposed by the DSC->DPT interface BW for the
 * given CRTC state, or 0 if that interface is not a limiting factor (i.e. on
 * non-UHBR links, on LNL+ or when DSC is not used).
 */
static int intel_dp_mst_max_dpt_bpp(const struct intel_crtc_state *crtc_state,
				    bool dsc)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	if (!intel_dp_is_uhbr(crtc_state) || DISPLAY_VER(display) >= 20 || !dsc)
		return 0;

	/*
	 * DSC->DPT interface width:
	 *   ICL-MTL: 72 bits (each branch has 72 bits, only left branch is used)
	 *   LNL+:    144 bits (not a bottleneck in any config)
	 *
	 * Bspec/49259 suggests that the FEC overhead needs to be
	 * applied here, though HW people claim that neither this FEC
	 * or any other overhead is applicable here (that is the actual
	 * available_bw is just symbol_clock * 72). However based on
	 * testing on MTL-P the
	 * - DELL U3224KBA display
	 * - Unigraf UCD-500 CTS test sink
	 * devices the
	 * - 5120x2880/995.59Mhz
	 * - 6016x3384/1357.23Mhz
	 * - 6144x3456/1413.39Mhz
	 * modes (all the ones having a DPT limit on the above devices),
	 * both the channel coding efficiency and an additional 3%
	 * overhead needs to be accounted for.
	 */
	return div64_u64(mul_u32_u32(intel_dp_link_symbol_clock(crtc_state->port_clock) * 72,
				     drm_dp_bw_channel_coding_efficiency(true)),
			 mul_u32_u32(adjusted_mode->crtc_clock, 1030000));
}

/*
 * Compute the BW allocation overhead flags/value for the stream via
 * drm_dp_bw_overhead(), accounting for MST, UHBR, SSC reference clock, FEC
 * and DSC as applicable. The result is clamped from below by the fixed FEC
 * overhead.
 */
static int intel_dp_mst_bw_overhead(const struct intel_crtc_state *crtc_state,
				    bool ssc, int dsc_slice_count, int bpp_x16)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	unsigned long flags = DRM_DP_BW_OVERHEAD_MST;
	int overhead;

	flags |= intel_dp_is_uhbr(crtc_state) ? DRM_DP_BW_OVERHEAD_UHBR : 0;
	flags |= ssc ? DRM_DP_BW_OVERHEAD_SSC_REF_CLK : 0;
	flags |= crtc_state->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0;

	if (dsc_slice_count)
		flags |= DRM_DP_BW_OVERHEAD_DSC;

	overhead = drm_dp_bw_overhead(crtc_state->lane_count,
				      adjusted_mode->hdisplay,
				      dsc_slice_count,
				      bpp_x16,
				      flags);

	/*
	 * TODO: clarify whether a minimum required by the fixed FEC overhead
	 * in the bspec audio programming sequence is required here.
	 */
	return max(overhead, intel_dp_bw_fec_overhead(crtc_state->fec_enable));
}

/* Compute the stream's data M/N values and the TU size derived from them. */
static void intel_dp_mst_compute_m_n(const struct intel_crtc_state *crtc_state,
				     int overhead,
				     int bpp_x16,
				     struct intel_link_m_n *m_n)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/* TODO: Check WA 14013163432 to set data M/N for full BW utilization. */
	intel_link_compute_m_n(bpp_x16, crtc_state->lane_count,
			       adjusted_mode->crtc_clock,
			       crtc_state->port_clock,
			       overhead,
			       m_n);

	/* TU size in units of 1/64 MTP, derived from the data M/N ratio. */
	m_n->tu = DIV_ROUND_UP_ULL(mul_u32_u32(m_n->data_m, 64), m_n->data_n);
}

/* Convert the stream's effective data rate to PBN units (rounding up). */
static int intel_dp_mst_calc_pbn(int pixel_clock, int bpp_x16, int bw_overhead)
{
	int effective_data_rate =
		intel_dp_effective_data_rate(pixel_clock, bpp_x16, bw_overhead);

	/*
	 * TODO: Use drm_dp_calc_pbn_mode() instead, once it's converted
	 * to calculate PBN with the BW overhead passed to it.
	 */
	return DIV_ROUND_UP(effective_data_rate * 64, 54 * 1000);
}

/* DSC slice count for the stream's adjusted mode, 0 if no valid count exists. */
static int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector,
					    const struct intel_crtc_state *crtc_state)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);

	return intel_dp_dsc_get_slice_count(connector,
					    adjusted_mode->clock,
					    adjusted_mode->hdisplay,
					    num_joined_pipes);
}

/*
 * Compute crtc_state->min_hblank for Xe2 (display ver 20) and newer; earlier
 * platforms are left untouched. The symbol size is 32 bits on 128b/132b
 * (UHBR) links and 8 bits on 8b/10b links.
 */
static void intel_dp_mst_compute_min_hblank(struct intel_crtc_state *crtc_state,
					    int bpp_x16)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int symbol_size = intel_dp_is_uhbr(crtc_state) ? 32 : 8;
	int hblank;

	if (DISPLAY_VER(display) < 20)
		return;

	/* Calculate min Hblank Link Layer Symbol Cycle Count for 8b/10b MST & 128b/132b */
	hblank = DIV_ROUND_UP((DIV_ROUND_UP
			       (adjusted_mode->htotal - adjusted_mode->hdisplay, 4) * bpp_x16),
			      symbol_size);

	crtc_state->min_hblank = hblank;
}

/*
 * Compute the TU size - and for MST links the time slot allocation/PBN - for
 * the stream, trying link bpp values from @max_bpp_x16 down to @min_bpp_x16
 * in @bpp_step_x16 decrements. A zero step means a single attempt. On
 * success the chosen bpp is stored in pipe_bpp (non-DSC) or
 * dsc.compressed_bpp_x16 (DSC) and 0 is returned; otherwise a negative error
 * code (-EDEADLK/-EINVAL/-ENOSPC) is returned.
 */
int intel_dp_mtp_tu_compute_config(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state,
				   struct drm_connector_state *conn_state,
				   int min_bpp_x16, int max_bpp_x16, int bpp_step_x16, bool dsc)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_atomic_state *state = crtc_state->uapi.state;
	struct drm_dp_mst_topology_state *mst_state = NULL;
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_mst = intel_dp->is_mst;
	int bpp_x16, slots = -EINVAL;
	int dsc_slice_count = 0;
	int max_dpt_bpp_x16;

	/* shouldn't happen, sanity check: only DSC uses fractional bpp */
	drm_WARN_ON(display->drm, !dsc && (fxp_q4_to_frac(min_bpp_x16) ||
					   fxp_q4_to_frac(max_bpp_x16) ||
					   fxp_q4_to_frac(bpp_step_x16)));

	if (is_mst) {
		mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst.mgr);
		if (IS_ERR(mst_state))
			return PTR_ERR(mst_state);

		mst_state->pbn_div = drm_dp_get_vc_payload_bw(crtc_state->port_clock,
							      crtc_state->lane_count);
	}

	if (dsc) {
		if (!intel_dp_supports_fec(intel_dp, connector, crtc_state))
			return -EINVAL;

		/* FEC is only required for DSC on 8b/10b links, not on UHBR */
		crtc_state->fec_enable = !intel_dp_is_uhbr(crtc_state);
	}

	max_dpt_bpp_x16 = fxp_q4_from_int(intel_dp_mst_max_dpt_bpp(crtc_state, dsc));
	if (max_dpt_bpp_x16 && max_bpp_x16 > max_dpt_bpp_x16) {
		drm_dbg_kms(display->drm, "Limiting bpp to max DPT bpp (" FXP_Q4_FMT " -> " FXP_Q4_FMT ")\n",
			    FXP_Q4_ARGS(max_bpp_x16), FXP_Q4_ARGS(max_dpt_bpp_x16));
		max_bpp_x16 = max_dpt_bpp_x16;
	}

	drm_dbg_kms(display->drm, "Looking for slots in range min bpp " FXP_Q4_FMT " max bpp " FXP_Q4_FMT "\n",
		    FXP_Q4_ARGS(min_bpp_x16), FXP_Q4_ARGS(max_bpp_x16));

	if (dsc) {
		dsc_slice_count = intel_dp_mst_dsc_get_slice_count(connector, crtc_state);
		if (!dsc_slice_count) {
			drm_dbg_kms(display->drm, "Can't get valid DSC slice count\n");

			return -ENOSPC;
		}
	}

	/* Try each bpp from the highest down until an allocation succeeds. */
	for (bpp_x16 = max_bpp_x16; bpp_x16 >= min_bpp_x16; bpp_x16 -= bpp_step_x16) {
		int local_bw_overhead;
		int link_bpp_x16;

		drm_dbg_kms(display->drm, "Trying bpp " FXP_Q4_FMT "\n", FXP_Q4_ARGS(bpp_x16));

		link_bpp_x16 = dsc ? bpp_x16 :
			fxp_q4_from_int(intel_dp_output_bpp(crtc_state->output_format,
							    fxp_q4_to_int(bpp_x16)));

		local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
							     false, dsc_slice_count, link_bpp_x16);

		intel_dp_mst_compute_min_hblank(crtc_state, link_bpp_x16);

		intel_dp_mst_compute_m_n(crtc_state,
					 local_bw_overhead,
					 link_bpp_x16,
					 &crtc_state->dp_m_n);

		if (is_mst) {
			int remote_bw_overhead;
			int remote_tu;
			fixed20_12 pbn;

			/* The remote allocation must also cover SSC overhead. */
			remote_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
								      true, dsc_slice_count, link_bpp_x16);

			/*
			 * The TU size programmed to the HW determines which slots in
			 * an MTP frame are used for this stream, which needs to match
			 * the payload size programmed to the first downstream branch
			 * device's payload table.
			 *
			 * Note that atm the payload's PBN value DRM core sends via
			 * the ALLOCATE_PAYLOAD side-band message matches the payload
			 * size (which it calculates from the PBN value) it programs
			 * to the first branch device's payload table. The allocation
			 * in the payload table could be reduced though (to
			 * crtc_state->dp_m_n.tu), provided that the driver doesn't
			 * enable SSC on the corresponding link.
			 */
			pbn.full = dfixed_const(intel_dp_mst_calc_pbn(adjusted_mode->crtc_clock,
								      link_bpp_x16,
								      remote_bw_overhead));
			remote_tu = DIV_ROUND_UP(pbn.full, mst_state->pbn_div.full);

			/*
			 * Aligning the TUs ensures that symbols consisting of multiple
			 * (4) symbol cycles don't get split between two consecutive
			 * MTPs, as required by Bspec.
			 * TODO: remove the alignment restriction for 128b/132b links
			 * on some platforms, where Bspec allows this.
			 */
			remote_tu = ALIGN(remote_tu, 4 / crtc_state->lane_count);

			/*
			 * Also align PBNs accordingly, since MST core will derive its
			 * own copy of TU from the PBN in drm_dp_atomic_find_time_slots().
			 * The above comment about the difference between the PBN
			 * allocated for the whole path and the TUs allocated for the
			 * first branch device's link also applies here.
			 */
			pbn.full = remote_tu * mst_state->pbn_div.full;

			drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu);
			crtc_state->dp_m_n.tu = remote_tu;

			slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst.mgr,
							      connector->mst.port,
							      dfixed_trunc(pbn));
		} else {
			/* Same as above for remote_tu */
			crtc_state->dp_m_n.tu = ALIGN(crtc_state->dp_m_n.tu,
						      4 / crtc_state->lane_count);

			if (crtc_state->dp_m_n.tu <= 64)
				slots = crtc_state->dp_m_n.tu;
			else
				slots = -EINVAL;
		}

		if (slots == -EDEADLK)
			return slots;

		if (slots >= 0) {
			drm_WARN_ON(display->drm, slots != crtc_state->dp_m_n.tu);

			break;
		}

		/* Allow using zero step to indicate one try */
		if (!bpp_step_x16)
			break;
	}

	if (slots < 0) {
		drm_dbg_kms(display->drm, "failed finding vcpi slots:%d\n",
			    slots);
		return slots;
	}

	if (!dsc)
		crtc_state->pipe_bpp = fxp_q4_to_int(bpp_x16);
	else
		crtc_state->dsc.compressed_bpp_x16 = bpp_x16;

	drm_dbg_kms(display->drm, "Got %d slots for pipe bpp " FXP_Q4_FMT " dsc %d\n",
		    slots, FXP_Q4_ARGS(bpp_x16), dsc);

	return 0;
}

/* Compute the non-DSC link configuration at the max rate/lane count limits. */
static int mst_stream_compute_link_config(struct intel_dp *intel_dp,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state,
					  const struct link_config_limits *limits)
{
	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	/*
	 * FIXME: allocate the BW according to link_bpp, which in the case of
	 * YUV420 is only half of the pipe bpp value.
	 */
	return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, conn_state,
					      limits->link.min_bpp_x16,
					      limits->link.max_bpp_x16,
					      fxp_q4_from_int(2 * 3), false);
}

/*
 * Compute the DSC link configuration: intersect the source and sink input
 * bpp limits, derive the compressed bpp range and allocate BW/time slots via
 * intel_dp_mtp_tu_compute_config().
 */
static int mst_stream_dsc_compute_link_config(struct intel_dp *intel_dp,
					      struct intel_crtc_state *crtc_state,
					      struct drm_connector_state *conn_state,
					      const struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	int num_bpc;
	u8 dsc_bpc[3] = {};
	int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp;
	int min_compressed_bpp, max_compressed_bpp;

	max_bpp = limits->pipe.max_bpp;
	min_bpp = limits->pipe.min_bpp;

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,
						       dsc_bpc);

	drm_dbg_kms(display->drm, "DSC Source supported min bpp %d max bpp %d\n",
		    min_bpp, max_bpp);

	/* Sink bpp limits, derived from the supported input bpc values (3 components). */
	sink_min_bpp = min_array(dsc_bpc, num_bpc) * 3;
	sink_max_bpp = max_array(dsc_bpc, num_bpc) * 3;

	drm_dbg_kms(display->drm, "DSC Sink supported min bpp %d max bpp %d\n",
		    sink_min_bpp, sink_max_bpp);

	if (min_bpp < sink_min_bpp)
		min_bpp = sink_min_bpp;

	if (max_bpp > sink_max_bpp)
		max_bpp = sink_max_bpp;

	crtc_state->pipe_bpp = max_bpp;

	max_compressed_bpp = fxp_q4_to_int(limits->link.max_bpp_x16);
	min_compressed_bpp =
		fxp_q4_to_int_roundup(limits->link.min_bpp_x16);

	drm_dbg_kms(display->drm, "DSC Sink supported compressed min bpp %d compressed max bpp %d\n",
		    min_compressed_bpp, max_compressed_bpp);

	/* Align compressed bpps according to our own constraints */
	max_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(display, max_compressed_bpp,
							    crtc_state->pipe_bpp);
	min_compressed_bpp = intel_dp_dsc_nearest_valid_bpp(display, min_compressed_bpp,
							    crtc_state->pipe_bpp);

	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, conn_state,
					      fxp_q4_from_int(min_compressed_bpp),
					      fxp_q4_from_int(max_compressed_bpp),
					      fxp_q4_from_int(1), true);
}

/*
 * Update the MST topology state's link coding capability, selecting
 * 128b/132b for UHBR links and 8b/10b otherwise.
 */
static int mst_stream_update_slots(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state,
				   struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst.mgr;
	struct drm_dp_mst_topology_state *topology_state;
	u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
		DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;

	topology_state = drm_atomic_get_mst_topology_state(conn_state->state, mgr);
	if (IS_ERR(topology_state)) {
		drm_dbg_kms(display->drm, "slot update failed\n");
		return PTR_ERR(topology_state);
	}

	drm_dp_mst_update_slots(topology_state, link_coding_cap);

	return 0;
}

/* Hblank period of @mode in ns; crtc_clock is in kHz, hence the /1000 scaling. */
static int mode_hblank_period_ns(const struct drm_display_mode *mode)
{
	return DIV_ROUND_CLOSEST_ULL(mul_u32_u32(mode->htotal - mode->hdisplay,
						 NSEC_PER_SEC / 1000),
				     mode->crtc_clock);
}

/*
 * Determine whether a sink with the hblank expansion quirk needs DSC for the
 * given mode: the quirk applies below a 300 ns (500 ns for UHBR capable
 * sinks) hblank period, provided a valid DSC slice count exists.
 */
static bool
hblank_expansion_quirk_needs_dsc(const struct intel_connector *connector,
				 const struct intel_crtc_state *crtc_state,
				 const struct link_config_limits *limits)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_uhbr_sink = connector->mst.dp &&
		drm_dp_128b132b_supported(connector->mst.dp->dpcd);
	int hblank_limit = is_uhbr_sink ? 500 : 300;

	if (!connector->dp.dsc_hblank_expansion_quirk)
		return false;

	if (is_uhbr_sink && !drm_dp_is_uhbr_rate(limits->max_rate))
		return false;

	if (mode_hblank_period_ns(adjusted_mode) > hblank_limit)
		return false;

	if (!intel_dp_mst_dsc_get_slice_count(connector, crtc_state))
		return false;

	return true;
}

/*
 * Raise the link min bpp limit as required by the DSC hblank expansion
 * quirk. Returns false if the adjusted limits can't be met - in the non-DSC
 * case also when DSC itself would be needed - and true otherwise.
 */
static bool
adjust_limits_for_dsc_hblank_expansion_quirk(struct intel_dp *intel_dp,
					     const struct intel_connector *connector,
					     const struct intel_crtc_state *crtc_state,
					     struct link_config_limits *limits,
					     bool dsc)
{
	struct intel_display *display = to_intel_display(connector);
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	int min_bpp_x16 = limits->link.min_bpp_x16;

	if (!hblank_expansion_quirk_needs_dsc(connector, crtc_state, limits))
		return true;

	if (!dsc) {
		if (intel_dp_supports_dsc(intel_dp, connector, crtc_state)) {
			drm_dbg_kms(display->drm,
				    "[CRTC:%d:%s][CONNECTOR:%d:%s] DSC needed by hblank expansion quirk\n",
				    crtc->base.base.id, crtc->base.name,
				    connector->base.base.id, connector->base.name);
			return false;
		}

		drm_dbg_kms(display->drm,
			    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to 24 due to hblank expansion quirk\n",
			    crtc->base.base.id, crtc->base.name,
			    connector->base.base.id, connector->base.name);

		if (limits->link.max_bpp_x16 < fxp_q4_from_int(24))
			return false;

		limits->link.min_bpp_x16 = fxp_q4_from_int(24);

		return true;
	}

	drm_WARN_ON(display->drm, limits->min_rate != limits->max_rate);

	/* DSC mode: the required min bpp depends on the link rate. */
	if (limits->max_rate < 540000)
		min_bpp_x16 = fxp_q4_from_int(13);
	else if (limits->max_rate < 810000)
		min_bpp_x16 = fxp_q4_from_int(10);

	if (limits->link.min_bpp_x16 >= min_bpp_x16)
		return true;

	drm_dbg_kms(display->drm,
		    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to " FXP_Q4_FMT " in DSC mode due to hblank expansion quirk\n",
		    crtc->base.base.id, crtc->base.name,
		    connector->base.base.id, connector->base.name,
		    FXP_Q4_ARGS(min_bpp_x16));

	if (limits->link.max_bpp_x16 < min_bpp_x16)
		return false;

	limits->link.min_bpp_x16 = min_bpp_x16;

	return true;
}

/*
 * Compute the stream's link config limits and apply the DSC hblank expansion
 * quirk adjustments to them. Returns false if no valid limits exist.
 */
static bool
mst_stream_compute_config_limits(struct intel_dp *intel_dp,
				 const struct intel_connector *connector,
				 struct intel_crtc_state *crtc_state,
				 bool dsc,
				 struct link_config_limits *limits)
{
	if (!intel_dp_compute_config_limits(intel_dp, crtc_state, false, dsc,
					    limits))
		return false;

	return adjust_limits_for_dsc_hblank_expansion_quirk(intel_dp,
							    connector,
							    crtc_state,
							    limits,
							    dsc);
}

/*
 * The MST stream encoder's .compute_config() hook: compute the stream's link
 * configuration, trying first without and then - if needed or forced - with
 * DSC, and fill in the rest of the stream's CRTC state.
 */
static int mst_stream_compute_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *pipe_config,
				     struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct link_config_limits limits;
	bool dsc_needed, joiner_needs_dsc;
	int num_joined_pipes;
	int ret = 0;

	if (pipe_config->fec_enable &&
	    !intel_dp_supports_fec(intel_dp, connector, pipe_config))
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     adjusted_mode->crtc_hdisplay,
						     adjusted_mode->crtc_clock);
	if (num_joined_pipes > 1)
		pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);

	pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->has_pch_encoder = false;

	joiner_needs_dsc = intel_dp_joiner_needs_dsc(display, num_joined_pipes);

	dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
		!mst_stream_compute_config_limits(intel_dp, connector,
						  pipe_config, false, &limits);

	if (!dsc_needed) {
		ret = mst_stream_compute_link_config(intel_dp, pipe_config,
						     conn_state, &limits);

		if (ret == -EDEADLK)
			return ret;

		/* Fall back to DSC if the mode didn't fit without it. */
		if (ret)
			dsc_needed = true;
	}

	if (dsc_needed && !intel_dp_supports_dsc(intel_dp, connector, pipe_config)) {
		drm_dbg_kms(display->drm, "DSC required but not available\n");
		return -EINVAL;
	}

	/* enable compression if the mode doesn't fit available BW */
	if (dsc_needed) {
		drm_dbg_kms(display->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
			    str_yes_no(intel_dp->force_dsc_en));

		if (!mst_stream_compute_config_limits(intel_dp, connector,
						      pipe_config, true,
						      &limits))
			return -EINVAL;

		/*
		 * FIXME: As bpc is hardcoded to 8, as mentioned above,
		 * WARN and ignore the debug flag force_dsc_bpc for now.
		 */
		drm_WARN(display->drm, intel_dp->force_dsc_bpc,
			 "Cannot Force BPC for MST\n");
		/*
		 * Try to get at least some timeslots and then see, if
		 * we can fit there with DSC.
		 */
		drm_dbg_kms(display->drm, "Trying to find VCPI slots in DSC mode\n");

		ret = mst_stream_dsc_compute_link_config(intel_dp, pipe_config,
							 conn_state, &limits);
		if (ret < 0)
			return ret;

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits,
						  pipe_config->dp_m_n.tu);
	}

	if (ret)
		return ret;

	ret = mst_stream_update_slots(intel_dp, pipe_config, conn_state);
	if (ret)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	if (display->platform.geminilake || display->platform.broxton)
		pipe_config->lane_lat_optim_mask =
			bxt_dpio_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);

	intel_ddi_compute_min_voltage_level(pipe_config);

	intel_psr_compute_config(intel_dp, pipe_config, conn_state);

	return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
							pipe_config);
}

/*
 * Iterate over all connectors and return a mask of
 * all CPU transcoders streaming over the same DP link.
 */
static unsigned int
intel_dp_mst_transcoder_mask(struct intel_atomic_state *state,
			     struct intel_dp *mst_port)
{
	struct intel_display *display = to_intel_display(state);
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 transcoders = 0;
	int i;

	/* Only TGL+ (display ver 12+) uses a master transcoder for MST. */
	if (DISPLAY_VER(display) < 12)
		return 0;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		const struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (connector->mst.dp != mst_port || !conn_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_state->base.crtc);
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

		if (!crtc_state->hw.active)
			continue;

		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

/*
 * Return the mask of pipes in @state whose connector is @parent_port or is
 * downstream of it in the given MST topology. A NULL @parent_port matches
 * all pipes on the topology.
 */
static u8 get_pipes_downstream_of_mst_port(struct intel_atomic_state *state,
					   struct drm_dp_mst_topology_mgr *mst_mgr,
					   struct drm_dp_mst_port *parent_port)
{
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 mask = 0;
	int i;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		if (!conn_state->base.crtc)
			continue;

		if (&connector->mst.dp->mst.mgr != mst_mgr)
			continue;

		if (connector->mst.port != parent_port &&
		    !drm_dp_mst_port_downstream_of_parent(mst_mgr,
							  connector->mst.port,
							  parent_port))
			continue;

		mask |= BIT(to_intel_crtc(conn_state->base.crtc)->pipe);
	}

	return mask;
}

/*
 * Ensure that all streams on an MST link agree on FEC: if only some of them
 * have FEC enabled, force FEC on for all the link's pipes and request a
 * recomputation by returning -EAGAIN.
 */
static int intel_dp_mst_check_fec_change(struct intel_atomic_state *state,
					 struct drm_dp_mst_topology_mgr *mst_mgr,
					 struct intel_link_bw_limits *limits)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc *crtc;
	u8 mst_pipe_mask;
	u8 fec_pipe_mask = 0;
	int ret;

	mst_pipe_mask = get_pipes_downstream_of_mst_port(state, mst_mgr, NULL);

	for_each_intel_crtc_in_pipe_mask(display->drm, crtc, mst_pipe_mask) {
		struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(state, crtc);

		/* Atomic connector check should've added all the MST CRTCs. */
		if (drm_WARN_ON(display->drm, !crtc_state))
			return -EINVAL;

		if (crtc_state->fec_enable)
			fec_pipe_mask |= BIT(crtc->pipe);
	}

	if (!fec_pipe_mask || mst_pipe_mask == fec_pipe_mask)
		return 0;

	limits->force_fec_pipes |= mst_pipe_mask;

	ret = intel_modeset_pipes_in_mask_early(state, "MST FEC",
						mst_pipe_mask);

	return ret ? : -EAGAIN;
}

/*
 * Check the MST topology's BW; on -ENOSPC reduce the bpp limit for the
 * pipes downstream of the first port with a BW limitation and return
 * -EAGAIN to request a recomputation.
 */
static int intel_dp_mst_check_bw(struct intel_atomic_state *state,
				 struct drm_dp_mst_topology_mgr *mst_mgr,
				 struct drm_dp_mst_topology_state *mst_state,
				 struct intel_link_bw_limits *limits)
{
	struct drm_dp_mst_port *mst_port;
	u8 mst_port_pipes;
	int ret;

	ret = drm_dp_mst_atomic_check_mgr(&state->base, mst_mgr, mst_state, &mst_port);
	if (ret != -ENOSPC)
		return ret;

	mst_port_pipes = get_pipes_downstream_of_mst_port(state, mst_mgr, mst_port);

	ret = intel_link_bw_reduce_bpp(state, limits,
				       mst_port_pipes, "MST link BW");

	return ret ? : -EAGAIN;
}

/**
 * intel_dp_mst_atomic_check_link - check all modeset MST link configuration
 * @state: intel atomic state
 * @limits: link BW limits
 *
 * Check the link configuration for all modeset MST outputs. If the
 * configuration is invalid @limits will be updated if possible to
 * reduce the total BW, after which the configuration for all CRTCs in
 * @state must be recomputed with the updated @limits.
 *
 * Returns:
 *   - 0 if the configuration is valid
 *   - %-EAGAIN, if the configuration is invalid and @limits got updated
 *     with fallback values with which the configuration of all CRTCs in
 *     @state must be recomputed
 *   - Other negative error, if the configuration is invalid without a
 *     fallback possibility, or the check failed for another reason
 */
int intel_dp_mst_atomic_check_link(struct intel_atomic_state *state,
				   struct intel_link_bw_limits *limits)
{
	struct drm_dp_mst_topology_mgr *mgr;
	struct drm_dp_mst_topology_state *mst_state;
	int ret;
	int i;

	for_each_new_mst_mgr_in_state(&state->base, mgr, mst_state, i) {
		ret = intel_dp_mst_check_fec_change(state, mgr, limits);
		if (ret)
			return ret;

		ret = intel_dp_mst_check_bw(state, mgr, mst_state,
					    limits);
		if (ret)
			return ret;
	}

	return 0;
}

/* The MST stream encoder's .compute_config_late() hook. */
static int mst_stream_compute_config_late(struct intel_encoder *encoder,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	/* lowest numbered transcoder will be designated master */
	crtc_state->mst_master_transcoder =
		ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1;

	return 0;
}

/*
 * If one of the connectors in a MST stream needs a modeset, mark all CRTCs
 * that share the same MST stream as mode changed,
 * intel_modeset_pipe_config()+intel_crtc_check_fastset() will take care to do
 * a fastset when possible.
 *
 * On TGL+ this is required since each stream goes through a master transcoder,
 * so if the master transcoder needs modeset, all other streams in the
 * topology need a modeset. All platforms need to add the atomic state
 * for all streams in the topology, since a modeset on one may require
 * changing the MST link BW usage of the others, which in turn needs a
 * recomputation of the corresponding CRTC states.
 */
static int
mst_connector_atomic_topology_check(struct intel_connector *connector,
				    struct intel_atomic_state *state)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_connector_list_iter connector_list_iter;
	struct intel_connector *connector_iter;
	int ret = 0;

	if (!intel_connector_needs_modeset(state, &connector->base))
		return 0;

	drm_connector_list_iter_begin(display->drm, &connector_list_iter);
	for_each_intel_connector_iter(connector_iter, &connector_list_iter) {
		struct intel_digital_connector_state *conn_iter_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		/* Only consider other connectors on the same MST link. */
		if (connector_iter->mst.dp != connector->mst.dp ||
		    connector_iter == connector)
			continue;

		conn_iter_state = intel_atomic_get_digital_connector_state(state,
									   connector_iter);
		if (IS_ERR(conn_iter_state)) {
			ret = PTR_ERR(conn_iter_state);
			break;
		}

		if (!conn_iter_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_iter_state->base.crtc);
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);
			break;
		}

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
		crtc_state->uapi.mode_changed = true;
	}
	drm_connector_list_iter_end(&connector_list_iter);

	return ret;
}

/* The MST connector's .atomic_check() hook. */
static int
mst_connector_atomic_check(struct drm_connector *_connector,
			   struct drm_atomic_state *_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct intel_connector *connector = to_intel_connector(_connector);
	int ret;

	ret = intel_digital_connector_atomic_check(&connector->base, &state->base);
	if (ret)
		return ret;

	ret = mst_connector_atomic_topology_check(connector, state);
	if (ret)
		return ret;

	if (intel_connector_needs_modeset(state, &connector->base)) {
		ret = intel_dp_tunnel_atomic_check_state(state,
							 connector->mst.dp,
							 connector);
		if (ret)
			return ret;
	}

	return drm_dp_atomic_release_time_slots(&state->base,
						&connector->mst.dp->mst.mgr,
						connector->mst.port);
}

/* The MST stream encoder's .disable() hook. */
static void mst_stream_disable(struct intel_atomic_state *state,
			       struct intel_encoder *encoder,
			       const struct intel_crtc_state *old_crtc_state,
			       const struct drm_connector_state *old_conn_state)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	enum transcoder trans = old_crtc_state->cpu_transcoder;

	drm_dbg_kms(display->drm, "active links %d\n",
		    intel_dp->mst.active_links);

	/* The last active stream going away invalidates the link training. */
	if (intel_dp->mst.active_links == 1)
		intel_dp->link_trained = false;

	intel_hdcp_disable(intel_mst->connector);

	intel_dp_sink_disable_decompression(state, connector, old_crtc_state);

	if (DISPLAY_VER(display) >= 20)
		intel_de_write(display, DP_MIN_HBLANK_CTL(trans), 0);
}

/* The MST stream encoder's .post_disable() hook. */
static void mst_stream_post_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct
intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	struct drm_dp_mst_topology_state *old_mst_state =
		drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	struct drm_dp_mst_topology_state *new_mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	const struct drm_dp_mst_atomic_payload *old_payload =
		drm_atomic_get_mst_payload_state(old_mst_state, connector->mst.port);
	struct drm_dp_mst_atomic_payload *new_payload =
		drm_atomic_get_mst_payload_state(new_mst_state, connector->mst.port);
	struct intel_crtc *pipe_crtc;
	bool last_mst_stream;
	int i;

	/* Drop this stream's link reference; the primary encoder is torn
	 * down below only when the last stream goes away.
	 */
	intel_dp->mst.active_links--;
	last_mst_stream = intel_dp->mst.active_links == 0;
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream &&
		    !intel_dp_mst_is_master_trans(old_crtc_state));

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_off(old_pipe_crtc_state);
	}

	intel_disable_transcoder(old_crtc_state);

	/* Step 1 of payload removal: update the payload table in the sink. */
	drm_dp_remove_payload_part1(&intel_dp->mst.mgr, new_mst_state, new_payload);

	intel_ddi_clear_act_sent(encoder, old_crtc_state);

	/* Drop the VC payload allocation and wait for the ACT handling. */
	intel_de_rmw(display,
		     TRANS_DDI_FUNC_CTL(display, old_crtc_state->cpu_transcoder),
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);

	intel_ddi_wait_for_act_sent(encoder, old_crtc_state);
	drm_dp_check_act_status(&intel_dp->mst.mgr);

	/* Step 2: free the time slots used by the payload. */
	drm_dp_remove_payload_part2(&intel_dp->mst.mgr, new_mst_state,
				    old_payload, new_payload);

	intel_ddi_disable_transcoder_func(old_crtc_state);

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_dsc_disable(old_pipe_crtc_state);

		if (DISPLAY_VER(display) >= 9)
			skl_scaler_disable(old_pipe_crtc_state);
		else
			ilk_pfit_disable(old_pipe_crtc_state);
	}

	/*
	 * Power down mst path before disabling the port, otherwise we end
	 * up getting interrupts from the sink upon detecting link loss.
	 */
	drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->mst.port,
				     false);

	/*
	 * BSpec 4287: disable DIP after the transcoder is disabled and before
	 * the transcoder clock select is set to none.
	 */
	intel_dp_set_infoframes(primary_encoder, false, old_crtc_state, NULL);
	/*
	 * From TGL spec: "If multi-stream slave transcoder: Configure
	 * Transcoder Clock Select to direct no clock to the transcoder"
	 *
	 * From older GENs spec: "Configure Transcoder Clock Select to direct
	 * no clock to the transcoder"
	 */
	if (DISPLAY_VER(display) < 12 || !last_mst_stream)
		intel_ddi_disable_transcoder_clock(old_crtc_state);


	intel_mst->connector = NULL;
	if (last_mst_stream)
		primary_encoder->post_disable(state, primary_encoder,
					      old_crtc_state, NULL);

	drm_dbg_kms(display->drm, "active links %d\n",
		    intel_dp->mst.active_links);
}

/* Tear down the port PLL once the last active stream is gone. */
static void mst_stream_post_pll_disable(struct intel_atomic_state *state,
					struct intel_encoder *encoder,
					const struct intel_crtc_state *old_crtc_state,
					const struct drm_connector_state *old_conn_state)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	if (intel_dp->mst.active_links == 0 &&
	    primary_encoder->post_pll_disable)
		primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state);
}

static void mst_stream_pre_pll_enable(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct
intel_crtc_state *pipe_config,
				      const struct drm_connector_state *conn_state)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	/* Only the first stream sets up the port PLL via the primary encoder. */
	if (intel_dp->mst.active_links == 0)
		primary_encoder->pre_pll_enable(state, primary_encoder,
						pipe_config, NULL);
	else
		/*
		 * The port PLL state needs to get updated for secondary
		 * streams as for the primary stream.
		 */
		intel_ddi_update_active_dpll(state, primary_encoder,
					     to_intel_crtc(pipe_config->uapi.crtc));
}

/* Do the cached link parameters match the given rate/lane count? */
static bool intel_mst_probed_link_params_valid(struct intel_dp *intel_dp,
					       int link_rate, int lane_count)
{
	return intel_dp->link.mst_probed_rate == link_rate &&
		intel_dp->link.mst_probed_lane_count == lane_count;
}

/* Cache the link parameters the topology was last probed with. */
static void intel_mst_set_probed_link_params(struct intel_dp *intel_dp,
					     int link_rate, int lane_count)
{
	intel_dp->link.mst_probed_rate = link_rate;
	intel_dp->link.mst_probed_lane_count = lane_count;
}

/* Queue a topology re-probe if the link parameters changed since last time. */
static void intel_mst_reprobe_topology(struct intel_dp *intel_dp,
				       const struct intel_crtc_state *crtc_state)
{
	if (intel_mst_probed_link_params_valid(intel_dp,
					       crtc_state->port_clock, crtc_state->lane_count))
		return;

	drm_dp_mst_topology_queue_probe(&intel_dp->mst.mgr);

	intel_mst_set_probed_link_params(intel_dp,
					 crtc_state->port_clock, crtc_state->lane_count);
}

static void mst_stream_pre_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	int ret;
	bool first_mst_stream;

	/* MST encoders are bound to a crtc, not to a connector,
	 * force the mapping here for get_hw_state.
	 */
	connector->encoder = encoder;
	intel_mst->connector = connector;
	first_mst_stream = intel_dp->mst.active_links == 0;
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream &&
		    !intel_dp_mst_is_master_trans(pipe_config));

	drm_dbg_kms(display->drm, "active links %d\n",
		    intel_dp->mst.active_links);

	/* Wake the sink up before the first stream comes up. */
	if (first_mst_stream)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);

	drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->mst.port, true);

	intel_dp_sink_enable_decompression(state, connector, pipe_config);

	if (first_mst_stream) {
		primary_encoder->pre_enable(state, primary_encoder,
					    pipe_config, NULL);

		intel_mst_reprobe_topology(intel_dp, pipe_config);
	}

	intel_dp->mst.active_links++;

	/* Step 1 of payload addition; on failure schedule a retry with
	 * reduced link parameters.
	 */
	ret = drm_dp_add_payload_part1(&intel_dp->mst.mgr, mst_state,
				       drm_atomic_get_mst_payload_state(mst_state, connector->mst.port));
	if (ret < 0)
		intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);

	/*
	 * Before Gen 12 this is not done as part of
	 * primary_encoder->pre_enable() and should be done here. For
	 * Gen 12+ the step in which this should be done is different for the
	 * first MST stream, so it's done on the DDI for the first stream and
	 * here for the following ones.
	 */
	if (DISPLAY_VER(display) < 12 || !first_mst_stream)
		intel_ddi_enable_transcoder_clock(encoder, pipe_config);

	if (DISPLAY_VER(display) >= 13 && !first_mst_stream)
		intel_ddi_config_transcoder_func(encoder, pipe_config);

	intel_dsc_dp_pps_write(primary_encoder, pipe_config);
	intel_ddi_set_dp_msa(pipe_config, conn_state);
}

/* Apply the ADL-P BS jitter / short-HBLANK chicken bit workarounds. */
static void enable_bs_jitter_was(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	u32 clear = 0;
	u32 set = 0;

	if (!display->platform.alderlake_p)
		return;

	if (!IS_DISPLAY_STEP(display, STEP_D0, STEP_FOREVER))
		return;

	/* Wa_14013163432:adlp */
	if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
		set |= DP_MST_FEC_BS_JITTER_WA(crtc_state->cpu_transcoder);

	/* Wa_14014143976:adlp */
	if (IS_DISPLAY_STEP(display, STEP_E0, STEP_FOREVER)) {
		if (intel_dp_is_uhbr(crtc_state))
			set |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
		else if (crtc_state->fec_enable)
			clear |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);

		if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
			set |= DP_MST_DPT_DPTP_ALIGN_WA(crtc_state->cpu_transcoder);
	}

	/* Avoid a register access if there is nothing to update. */
	if (!clear && !set)
		return;

	intel_de_rmw(display, CHICKEN_MISC_3, clear, set);
}

static void mst_stream_enable(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	enum transcoder trans = pipe_config->cpu_transcoder;
	/* ->pre_enable already incremented active_links, so 1 == first stream */
	bool first_mst_stream = intel_dp->mst.active_links == 1;
	struct intel_crtc *pipe_crtc;
	int ret, i, min_hblank;

	drm_WARN_ON(display->drm, pipe_config->has_pch_encoder);

	/* On UHBR links program the pixel clock for the transcoder in Hz. */
	if (intel_dp_is_uhbr(pipe_config)) {
		const struct drm_display_mode *adjusted_mode =
			&pipe_config->hw.adjusted_mode;
		u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock);

		intel_de_write(display, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24));
		intel_de_write(display, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
	}

	if (DISPLAY_VER(display) >= 20) {
		/*
		 * adjust the BlankingStart/BlankingEnd framing control from
		 * the calculated value
		 */
		min_hblank = pipe_config->min_hblank - 2;

		/* Maximum value to be programmed is limited to 0x10 */
		min_hblank = min(0x10, min_hblank);

		/*
		 * Minimum hblank accepted for 128b/132b would be 5 and for
		 * 8b/10b would be 3 symbol count
		 */
		if (intel_dp_is_uhbr(pipe_config))
			min_hblank = max(min_hblank, 5);
		else
			min_hblank = max(min_hblank, 3);

		intel_de_write(display, DP_MIN_HBLANK_CTL(trans),
			       min_hblank);
	}

	enable_bs_jitter_was(pipe_config);

	intel_ddi_enable_transcoder_func(encoder, pipe_config);

	intel_ddi_clear_act_sent(encoder, pipe_config);

	/* Enable the VC payload allocation, then wait for the ACT handling. */
	intel_de_rmw(display, TRANS_DDI_FUNC_CTL(display, trans), 0,
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC);

	drm_dbg_kms(display->drm, "active links %d\n",
		    intel_dp->mst.active_links);

	intel_ddi_wait_for_act_sent(encoder, pipe_config);
	drm_dp_check_act_status(&intel_dp->mst.mgr);

	if (first_mst_stream)
		intel_ddi_wait_for_fec_status(encoder,
					      pipe_config, true);

	/* Step 2 of payload addition; on failure schedule a link retry. */
	ret = drm_dp_add_payload_part2(&intel_dp->mst.mgr,
				       drm_atomic_get_mst_payload_state(mst_state,
									connector->mst.port));
	if (ret < 0)
		intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);

	if (DISPLAY_VER(display) >= 12)
		intel_de_rmw(display, CHICKEN_TRANS(display, trans),
			     FECSTALL_DIS_DPTSTREAM_DPTTG,
			     pipe_config->fec_enable ? FECSTALL_DIS_DPTSTREAM_DPTTG : 0);

	intel_audio_sdp_split_update(pipe_config);

	intel_enable_transcoder(pipe_config);

	for_each_pipe_crtc_modeset_enable(display, pipe_crtc, pipe_config, i) {
		const struct intel_crtc_state *pipe_crtc_state =
			intel_atomic_get_new_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_on(pipe_crtc_state);
	}

	intel_hdcp_enable(state, encoder, pipe_config, conn_state);
}

/* A stream encoder is active iff a connector was bound in ->pre_enable. */
static bool mst_stream_get_hw_state(struct intel_encoder *encoder,
				    enum pipe *pipe)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	*pipe = intel_mst->pipe;
	if (intel_mst->connector)
		return true;
	return false;
}

/* State readout is delegated to the primary (DDI) encoder. */
static void mst_stream_get_config(struct intel_encoder *encoder,
				  struct intel_crtc_state *pipe_config)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);

	primary_encoder->get_config(primary_encoder, pipe_config);
}

static bool mst_stream_initial_fastset_check(struct intel_encoder *encoder,
					     struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);

	return intel_dp_initial_fastset_check(primary_encoder, crtc_state);
}

/* Read the EDID via sideband messaging and update the connector modes. */
static int mst_connector_get_ddc_modes(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp =
connector->mst.dp;
	const struct drm_edid *drm_edid;
	int ret;

	/* A removed connector keeps no modes. */
	if (drm_connector_is_unregistered(&connector->base))
		return intel_connector_update_modes(&connector->base, NULL);

	/* Without HW access rely on the already cached EDID. */
	if (!intel_display_driver_check_access(display))
		return drm_edid_connector_add_modes(&connector->base);

	drm_edid = drm_dp_mst_edid_read(&connector->base, &intel_dp->mst.mgr, connector->mst.port);

	ret = intel_connector_update_modes(&connector->base, drm_edid);

	drm_edid_free(drm_edid);

	return ret;
}

static int
mst_connector_late_register(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	int ret;

	ret = drm_dp_mst_connector_late_register(&connector->base, connector->mst.port);
	if (ret < 0)
		return ret;

	ret = intel_connector_register(&connector->base);
	if (ret < 0)
		/* Undo the MST registration done above on failure. */
		drm_dp_mst_connector_early_unregister(&connector->base, connector->mst.port);

	return ret;
}

static void
mst_connector_early_unregister(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);

	intel_connector_unregister(&connector->base);
	drm_dp_mst_connector_early_unregister(&connector->base, connector->mst.port);
}

static const struct drm_connector_funcs mst_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = mst_connector_late_register,
	.early_unregister = mst_connector_early_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};

static int
mst_connector_get_modes(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);

	return mst_connector_get_ddc_modes(&connector->base);
}

/*
 * Validate a mode against the source/link/branch limits. The result is
 * returned via @status; a non-zero return value indicates a locking error.
 */
static int
mst_connector_mode_valid_ctx(struct drm_connector *_connector,
			     const struct drm_display_mode *mode,
			     struct drm_modeset_acquire_ctx *ctx,
			     enum drm_mode_status *status)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = connector->mst.dp;
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst.mgr;
	struct drm_dp_mst_port *port = connector->mst.port;
	const int min_bpp = 18;
	int max_dotclk = display->cdclk.max_dotclk_freq;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int ret;
	bool dsc = false;
	u16 dsc_max_compressed_bpp = 0;
	u8 dsc_slice_count = 0;
	int target_clock = mode->clock;
	int num_joined_pipes;

	if (drm_connector_is_unregistered(&connector->base)) {
		*status = MODE_ERROR;
		return 0;
	}

	*status = intel_cpu_transcoder_mode_valid(display, mode);
	if (*status != MODE_OK)
		return 0;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK) {
		*status = MODE_H_ILLEGAL;
		return 0;
	}

	if (mode->clock < 10000) {
		*status = MODE_CLOCK_LOW;
		return 0;
	}

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_link_data_rate(intel_dp,
					       max_link_clock, max_lanes);
	mode_rate = intel_dp_link_required(mode->clock, min_bpp);

	/*
	 * TODO:
	 * - Also check if compression would allow for the mode
	 * - Calculate the overhead using drm_dp_bw_overhead() /
	 *   drm_dp_bw_channel_coding_efficiency(), similarly to the
	 *   compute config code, as drm_dp_calc_pbn_mode()
doesn't
	 *   account with all the overheads.
	 * - Check here and during compute config the BW reported by
	 *   DFP_Link_Available_Payload_Bandwidth_Number (or the
	 *   corresponding link capabilities of the sink) in case the
	 *   stream is uncompressed for it by the last branch device.
	 */
	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     mode->hdisplay, target_clock);
	max_dotclk *= num_joined_pipes;

	/*
	 * The lock protects port->full_pbn below; it is dropped by the
	 * caller via the acquire context.
	 */
	ret = drm_modeset_lock(&mgr->base.lock, ctx);
	if (ret)
		return ret;

	if (mode_rate > max_rate || mode->clock > max_dotclk ||
	    drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (intel_dp_has_dsc(connector)) {
		/*
		 * TBD pass the connector BPC,
		 * for now U8_MAX so that max BPC on that platform would be picked
		 */
		int pipe_bpp = intel_dp_dsc_compute_max_bpp(connector, U8_MAX);

		if (drm_dp_sink_supports_fec(connector->dp.fec_capability)) {
			dsc_max_compressed_bpp =
				intel_dp_dsc_get_max_compressed_bpp(display,
								    max_link_clock,
								    max_lanes,
								    target_clock,
								    mode->hdisplay,
								    num_joined_pipes,
								    INTEL_OUTPUT_FORMAT_RGB,
								    pipe_bpp, 64);
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(connector,
							     target_clock,
							     mode->hdisplay,
							     num_joined_pipes);
		}

		/* DSC is usable only with both a valid bpp and slice count. */
		dsc = dsc_max_compressed_bpp && dsc_slice_count;
	}

	if (intel_dp_joiner_needs_dsc(display, num_joined_pipes) && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (mode_rate > max_rate && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	*status = intel_mode_valid_max_plane_size(display, mode, num_joined_pipes);
	return 0;
}

/* Each CRTC has a dedicated stream encoder; pick it by pipe. */
static struct drm_encoder *
mst_connector_atomic_best_encoder(struct drm_connector *_connector,
				  struct drm_atomic_state *state)
{
	struct intel_connector
*connector = to_intel_connector(_connector);
	struct drm_connector_state *connector_state =
		drm_atomic_get_new_connector_state(state, &connector->base);
	struct intel_dp *intel_dp = connector->mst.dp;
	struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);

	return &intel_dp->mst.stream_encoders[crtc->pipe]->base.base;
}

/* Connector ->detect_ctx hook: query the port status from the topology. */
static int
mst_connector_detect_ctx(struct drm_connector *_connector,
			 struct drm_modeset_acquire_ctx *ctx, bool force)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = connector->mst.dp;

	if (!intel_display_device_enabled(display))
		return connector_status_disconnected;

	if (drm_connector_is_unregistered(&connector->base))
		return connector_status_disconnected;

	/* Without HW access keep reporting the last known status. */
	if (!intel_display_driver_check_access(display))
		return connector->base.status;

	intel_dp_flush_connector_commits(connector);

	return drm_dp_mst_detect_port(&connector->base, ctx, &intel_dp->mst.mgr,
				      connector->mst.port);
}

static const struct drm_connector_helper_funcs mst_connector_helper_funcs = {
	.get_modes = mst_connector_get_modes,
	.mode_valid_ctx = mst_connector_mode_valid_ctx,
	.atomic_best_encoder = mst_connector_atomic_best_encoder,
	.atomic_check = mst_connector_atomic_check,
	.detect_ctx = mst_connector_detect_ctx,
};

/* Free the stream encoder allocated in mst_stream_encoder_create(). */
static void mst_stream_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(to_intel_encoder(encoder));

	drm_encoder_cleanup(encoder);
	kfree(intel_mst);
}

static const struct drm_encoder_funcs mst_stream_encoder_funcs = {
	.destroy = mst_stream_encoder_destroy,
};

static bool mst_connector_get_hw_state(struct intel_connector *connector)
{
	/* This is the
MST stream encoder set in ->pre_enable, if any */
	struct intel_encoder *encoder = intel_attached_encoder(connector);
	enum pipe pipe;

	if (!encoder || !connector->base.state->crtc)
		return false;

	return encoder->get_hw_state(encoder, &pipe);
}

/* Attach the standard connector properties to a new MST connector. */
static int mst_topology_add_connector_properties(struct intel_dp *intel_dp,
						 struct drm_connector *_connector,
						 const char *pathprop)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(_connector);

	drm_object_attach_property(&connector->base.base,
				   display->drm->mode_config.path_property, 0);
	drm_object_attach_property(&connector->base.base,
				   display->drm->mode_config.tile_property, 0);

	intel_attach_force_audio_property(&connector->base);
	intel_attach_broadcast_rgb_property(&connector->base);

	/*
	 * Reuse the prop from the SST connector because we're
	 * not allowed to create new props after device registration.
	 */
	connector->base.max_bpc_property =
		intel_dp->attached_connector->base.max_bpc_property;
	if (connector->base.max_bpc_property)
		drm_connector_attach_max_bpc_property(&connector->base, 6, 12);

	return drm_connector_set_path_property(&connector->base, pathprop);
}

/* Read the DSC capabilities of the decompressing branch/sink, if any. */
static void
intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp *intel_dp,
					      struct intel_connector *connector)
{
	u8 dpcd_caps[DP_RECEIVER_CAP_SIZE];

	if (!connector->dp.dsc_decompression_aux)
		return;

	if (drm_dp_read_dpcd_caps(connector->dp.dsc_decompression_aux, dpcd_caps) < 0)
		return;

	intel_dp_get_dsc_sink_cap(dpcd_caps[DP_DPCD_REV], connector);
}

/* Detect sinks that require DSC for modes with a short HBLANK. */
static bool detect_dsc_hblank_expansion_quirk(const struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_dp_aux *aux = connector->dp.dsc_decompression_aux;
	struct drm_dp_desc desc;
	u8 dpcd[DP_RECEIVER_CAP_SIZE];

	if (!aux)
		return false;

	/*
	 * A logical port's OUI (at least for affected sinks) is all 0, so
	 * instead of that the parent port's OUI is used for identification.
	 */
	if (drm_dp_mst_port_is_logical(connector->mst.port)) {
		aux = drm_dp_mst_aux_for_parent(connector->mst.port);
		if (!aux)
			aux = &connector->mst.dp->aux;
	}

	if (drm_dp_read_dpcd_caps(aux, dpcd) < 0)
		return false;

	if (drm_dp_read_desc(aux, &desc, drm_dp_is_branch(dpcd)) < 0)
		return false;

	if (!drm_dp_has_quirk(&desc,
			      DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC))
		return false;

	/*
	 * UHBR (MST sink) devices requiring this quirk don't advertise the
	 * HBLANK expansion support. Presuming that they perform HBLANK
	 * expansion internally, or are affected by this issue on modes with a
	 * short HBLANK for other reasons.
	 */
	if (!drm_dp_128b132b_supported(dpcd) &&
	    !(dpcd[DP_RECEIVE_PORT_0_CAP_0] & DP_HBLANK_EXPANSION_CAPABLE))
		return false;

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] DSC HBLANK expansion quirk detected\n",
		    connector->base.base.id, connector->base.name);

	return true;
}

/*
 * Topology manager ->add_connector callback: create and initialize a
 * connector for a newly detected MST port. Returns NULL on failure.
 */
static struct drm_connector *
mst_topology_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			   struct drm_dp_mst_port *port,
			   const char *pathprop)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_connector *connector;
	enum pipe pipe;
	int ret;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	connector->get_hw_state = mst_connector_get_hw_state;
	connector->sync_state = intel_dp_connector_sync_state;
	connector->mst.dp = intel_dp;
	connector->mst.port = port;
	/* Balanced by drm_dp_mst_put_port_malloc() on the error path. */
	drm_dp_mst_get_port_malloc(port);

	ret = drm_connector_dynamic_init(display->drm, &connector->base, &mst_connector_funcs,
					 DRM_MODE_CONNECTOR_DisplayPort, NULL);
	if (ret)
		goto err_put_port;

	connector->dp.dsc_decompression_aux = drm_dp_mst_dsc_aux_for_port(port);
	intel_dp_mst_read_decompression_port_dsc_caps(intel_dp, connector);
	connector->dp.dsc_hblank_expansion_quirk =
		detect_dsc_hblank_expansion_quirk(connector);

	drm_connector_helper_add(&connector->base, &mst_connector_helper_funcs);

	/* The stream can be driven via any pipe's stream encoder. */
	for_each_pipe(display, pipe) {
		struct drm_encoder *enc =
			&intel_dp->mst.stream_encoders[pipe]->base.base;

		ret = drm_connector_attach_encoder(&connector->base, enc);
		if (ret)
			goto err_cleanup_connector;
	}

	ret = mst_topology_add_connector_properties(intel_dp, &connector->base, pathprop);
	if (ret)
		goto
err_cleanup_connector;

	/* HDCP init failure is not fatal, only logged. */
	ret = intel_dp_hdcp_init(dig_port, connector);
	if (ret)
		drm_dbg_kms(display->drm, "[%s:%d] HDCP MST init failed, skipping.\n",
			    connector->base.name, connector->base.base.id);

	return &connector->base;

err_cleanup_connector:
	drm_connector_cleanup(&connector->base);
err_put_port:
	drm_dp_mst_put_port_malloc(port);
	intel_connector_free(connector);

	return NULL;
}

/* Topology manager ->poll_hpd_irq callback: kick the HPD handling. */
static void
mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);

	intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
}

static const struct drm_dp_mst_topology_cbs mst_topology_cbs = {
	.add_connector = mst_topology_add_connector,
	.poll_hpd_irq = mst_topology_poll_hpd_irq,
};

/* Create a fake encoder for an individual MST stream */
static struct intel_dp_mst_encoder *
mst_stream_encoder_create(struct intel_digital_port *dig_port, enum pipe pipe)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_encoder *primary_encoder = &dig_port->base;
	struct intel_dp_mst_encoder *intel_mst;
	struct intel_encoder *encoder;

	intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL);

	if (!intel_mst)
		return NULL;

	intel_mst->pipe = pipe;
	encoder = &intel_mst->base;
	intel_mst->primary = dig_port;

	drm_encoder_init(display->drm, &encoder->base, &mst_stream_encoder_funcs,
			 DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe));

	encoder->type = INTEL_OUTPUT_DP_MST;
	encoder->power_domain = primary_encoder->power_domain;
	encoder->port = primary_encoder->port;
	encoder->cloneable = 0;
	/*
	 * This is wrong, but broken userspace uses the intersection
	 * of possible_crtcs of all the encoders of a given connector
	 * to figure out which crtcs
can drive said connector. What
	 * should be used instead is the union of possible_crtcs.
	 * To keep such userspace functioning we must misconfigure
	 * this to make sure the intersection is not empty :(
	 */
	encoder->pipe_mask = ~0;

	/* Hook up the per-stream encoder vfuncs. */
	encoder->compute_config = mst_stream_compute_config;
	encoder->compute_config_late = mst_stream_compute_config_late;
	encoder->disable = mst_stream_disable;
	encoder->post_disable = mst_stream_post_disable;
	encoder->post_pll_disable = mst_stream_post_pll_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->pre_pll_enable = mst_stream_pre_pll_enable;
	encoder->pre_enable = mst_stream_pre_enable;
	encoder->enable = mst_stream_enable;
	encoder->audio_enable = intel_audio_codec_enable;
	encoder->audio_disable = intel_audio_codec_disable;
	encoder->get_hw_state = mst_stream_get_hw_state;
	encoder->get_config = mst_stream_get_config;
	encoder->initial_fastset_check = mst_stream_initial_fastset_check;

	return intel_mst;

}

/* Create the fake encoders for MST streams */
static bool
mst_stream_encoders_create(struct intel_digital_port *dig_port)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum pipe pipe;

	/* One stream encoder per pipe, all bound to this port. */
	for_each_pipe(display, pipe)
		intel_dp->mst.stream_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
	return true;
}

int
intel_dp_mst_encoder_active_links(struct intel_digital_port *dig_port)
{
	return dig_port->dp.mst.active_links;
}

/*
 * Initialize MST support for the primary encoder of @dig_port, creating
 * the stream encoders and the topology manager.
 */
int
intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum port port = dig_port->base.port;
	int ret;

	if (!HAS_DP_MST(display) || intel_dp_is_edp(intel_dp))
return 0; 1888 1889 if (DISPLAY_VER(display) < 12 && port == PORT_A) 1890 return 0; 1891 1892 if (DISPLAY_VER(display) < 11 && port == PORT_E) 1893 return 0; 1894 1895 intel_dp->mst.mgr.cbs = &mst_topology_cbs; 1896 1897 /* create encoders */ 1898 mst_stream_encoders_create(dig_port); 1899 ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst.mgr, display->drm, 1900 &intel_dp->aux, 16, 1901 INTEL_NUM_PIPES(display), conn_base_id); 1902 if (ret) { 1903 intel_dp->mst.mgr.cbs = NULL; 1904 return ret; 1905 } 1906 1907 return 0; 1908 } 1909 1910 bool intel_dp_mst_source_support(struct intel_dp *intel_dp) 1911 { 1912 return intel_dp->mst.mgr.cbs; 1913 } 1914 1915 void 1916 intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port) 1917 { 1918 struct intel_dp *intel_dp = &dig_port->dp; 1919 1920 if (!intel_dp_mst_source_support(intel_dp)) 1921 return; 1922 1923 drm_dp_mst_topology_mgr_destroy(&intel_dp->mst.mgr); 1924 /* encoders will get killed by normal cleanup */ 1925 1926 intel_dp->mst.mgr.cbs = NULL; 1927 } 1928 1929 bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state) 1930 { 1931 return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder; 1932 } 1933 1934 bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state) 1935 { 1936 return crtc_state->mst_master_transcoder != INVALID_TRANSCODER && 1937 crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder; 1938 } 1939 1940 /** 1941 * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector 1942 * @state: atomic state 1943 * @connector: connector to add the state for 1944 * @crtc: the CRTC @connector is attached to 1945 * 1946 * Add the MST topology state for @connector to @state. 1947 * 1948 * Returns 0 on success, negative error code on failure. 
1949 */ 1950 static int 1951 intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state, 1952 struct intel_connector *connector, 1953 struct intel_crtc *crtc) 1954 { 1955 struct drm_dp_mst_topology_state *mst_state; 1956 1957 if (!connector->mst.dp) 1958 return 0; 1959 1960 mst_state = drm_atomic_get_mst_topology_state(&state->base, 1961 &connector->mst.dp->mst.mgr); 1962 if (IS_ERR(mst_state)) 1963 return PTR_ERR(mst_state); 1964 1965 mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base); 1966 1967 return 0; 1968 } 1969 1970 /** 1971 * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC 1972 * @state: atomic state 1973 * @crtc: CRTC to add the state for 1974 * 1975 * Add the MST topology state for @crtc to @state. 1976 * 1977 * Returns 0 on success, negative error code on failure. 1978 */ 1979 int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state, 1980 struct intel_crtc *crtc) 1981 { 1982 struct drm_connector *_connector; 1983 struct drm_connector_state *conn_state; 1984 int i; 1985 1986 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) { 1987 struct intel_connector *connector = to_intel_connector(_connector); 1988 int ret; 1989 1990 if (conn_state->crtc != &crtc->base) 1991 continue; 1992 1993 ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc); 1994 if (ret) 1995 return ret; 1996 } 1997 1998 return 0; 1999 } 2000 2001 static struct intel_connector * 2002 get_connector_in_state_for_crtc(struct intel_atomic_state *state, 2003 const struct intel_crtc *crtc) 2004 { 2005 struct drm_connector_state *old_conn_state; 2006 struct drm_connector_state *new_conn_state; 2007 struct drm_connector *_connector; 2008 int i; 2009 2010 for_each_oldnew_connector_in_state(&state->base, _connector, 2011 old_conn_state, new_conn_state, i) { 2012 struct intel_connector *connector = 2013 to_intel_connector(_connector); 2014 2015 if (old_conn_state->crtc == &crtc->base 
|| 2016 new_conn_state->crtc == &crtc->base) 2017 return connector; 2018 } 2019 2020 return NULL; 2021 } 2022 2023 /** 2024 * intel_dp_mst_crtc_needs_modeset - check if changes in topology need to modeset the given CRTC 2025 * @state: atomic state 2026 * @crtc: CRTC for which to check the modeset requirement 2027 * 2028 * Check if any change in a MST topology requires a forced modeset on @crtc in 2029 * this topology. One such change is enabling/disabling the DSC decompression 2030 * state in the first branch device's UFP DPCD as required by one CRTC, while 2031 * the other @crtc in the same topology is still active, requiring a full modeset 2032 * on @crtc. 2033 */ 2034 bool intel_dp_mst_crtc_needs_modeset(struct intel_atomic_state *state, 2035 struct intel_crtc *crtc) 2036 { 2037 const struct intel_connector *crtc_connector; 2038 const struct drm_connector_state *conn_state; 2039 const struct drm_connector *_connector; 2040 int i; 2041 2042 if (!intel_crtc_has_type(intel_atomic_get_new_crtc_state(state, crtc), 2043 INTEL_OUTPUT_DP_MST)) 2044 return false; 2045 2046 crtc_connector = get_connector_in_state_for_crtc(state, crtc); 2047 2048 if (!crtc_connector) 2049 /* None of the connectors in the topology needs modeset */ 2050 return false; 2051 2052 for_each_new_connector_in_state(&state->base, _connector, conn_state, i) { 2053 const struct intel_connector *connector = 2054 to_intel_connector(_connector); 2055 const struct intel_crtc_state *new_crtc_state; 2056 const struct intel_crtc_state *old_crtc_state; 2057 struct intel_crtc *crtc_iter; 2058 2059 if (connector->mst.dp != crtc_connector->mst.dp || 2060 !conn_state->crtc) 2061 continue; 2062 2063 crtc_iter = to_intel_crtc(conn_state->crtc); 2064 2065 new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc_iter); 2066 old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc_iter); 2067 2068 if (!intel_crtc_needs_modeset(new_crtc_state)) 2069 continue; 2070 2071 if (old_crtc_state->dsc.compression_enable 
== 2072 new_crtc_state->dsc.compression_enable) 2073 continue; 2074 /* 2075 * Toggling the decompression flag because of this stream in 2076 * the first downstream branch device's UFP DPCD may reset the 2077 * whole branch device. To avoid the reset while other streams 2078 * are also active modeset the whole MST topology in this 2079 * case. 2080 */ 2081 if (connector->dp.dsc_decompression_aux == 2082 &connector->mst.dp->aux) 2083 return true; 2084 } 2085 2086 return false; 2087 } 2088 2089 /** 2090 * intel_dp_mst_prepare_probe - Prepare an MST link for topology probing 2091 * @intel_dp: DP port object 2092 * 2093 * Prepare an MST link for topology probing, programming the target 2094 * link parameters to DPCD. This step is a requirement of the enumeration 2095 * of path resources during probing. 2096 */ 2097 void intel_dp_mst_prepare_probe(struct intel_dp *intel_dp) 2098 { 2099 int link_rate = intel_dp_max_link_rate(intel_dp); 2100 int lane_count = intel_dp_max_lane_count(intel_dp); 2101 u8 rate_select; 2102 u8 link_bw; 2103 2104 if (intel_dp->link_trained) 2105 return; 2106 2107 if (intel_mst_probed_link_params_valid(intel_dp, link_rate, lane_count)) 2108 return; 2109 2110 intel_dp_compute_rate(intel_dp, link_rate, &link_bw, &rate_select); 2111 2112 intel_dp_link_training_set_mode(intel_dp, link_rate, false); 2113 intel_dp_link_training_set_bw(intel_dp, link_bw, rate_select, lane_count, 2114 drm_dp_enhanced_frame_cap(intel_dp->dpcd)); 2115 2116 intel_mst_set_probed_link_params(intel_dp, link_rate, lane_count); 2117 } 2118 2119 /* 2120 * intel_dp_mst_verify_dpcd_state - verify the MST SW enabled state wrt. the DPCD 2121 * @intel_dp: DP port object 2122 * 2123 * Verify if @intel_dp's MST enabled SW state matches the corresponding DPCD 2124 * state. A long HPD pulse - not long enough to be detected as a disconnected 2125 * state - could've reset the DPCD state, which requires tearing 2126 * down/recreating the MST topology. 
2127 * 2128 * Returns %true if the SW MST enabled and DPCD states match, %false 2129 * otherwise. 2130 */ 2131 bool intel_dp_mst_verify_dpcd_state(struct intel_dp *intel_dp) 2132 { 2133 struct intel_display *display = to_intel_display(intel_dp); 2134 struct intel_connector *connector = intel_dp->attached_connector; 2135 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 2136 struct intel_encoder *encoder = &dig_port->base; 2137 int ret; 2138 u8 val; 2139 2140 if (!intel_dp->is_mst) 2141 return true; 2142 2143 ret = drm_dp_dpcd_readb(intel_dp->mst.mgr.aux, DP_MSTM_CTRL, &val); 2144 2145 /* Adjust the expected register value for SST + SideBand. */ 2146 if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) { 2147 drm_dbg_kms(display->drm, 2148 "[CONNECTOR:%d:%s][ENCODER:%d:%s] MST mode got reset, removing topology (ret=%d, ctrl=0x%02x)\n", 2149 connector->base.base.id, connector->base.name, 2150 encoder->base.base.id, encoder->base.name, 2151 ret, val); 2152 2153 return false; 2154 } 2155 2156 return true; 2157 } 2158