1 /* 2 * Copyright © 2008 Intel Corporation 3 * 2014 Red Hat Inc. 4 * 5 * Permission is hereby granted, free of charge, to any person obtaining a 6 * copy of this software and associated documentation files (the "Software"), 7 * to deal in the Software without restriction, including without limitation 8 * the rights to use, copy, modify, merge, publish, distribute, sublicense, 9 * and/or sell copies of the Software, and to permit persons to whom the 10 * Software is furnished to do so, subject to the following conditions: 11 * 12 * The above copyright notice and this permission notice (including the next 13 * paragraph) shall be included in all copies or substantial portions of the 14 * Software. 15 * 16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 19 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 21 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 22 * IN THE SOFTWARE. 
23 * 24 */ 25 26 #include <linux/log2.h> 27 #include <linux/math.h> 28 29 #include <drm/drm_atomic.h> 30 #include <drm/drm_atomic_helper.h> 31 #include <drm/drm_edid.h> 32 #include <drm/drm_fixed.h> 33 #include <drm/drm_print.h> 34 #include <drm/drm_probe_helper.h> 35 36 #include "intel_atomic.h" 37 #include "intel_audio.h" 38 #include "intel_connector.h" 39 #include "intel_crtc.h" 40 #include "intel_ddi.h" 41 #include "intel_de.h" 42 #include "intel_display_driver.h" 43 #include "intel_display_regs.h" 44 #include "intel_display_types.h" 45 #include "intel_display_utils.h" 46 #include "intel_dp.h" 47 #include "intel_dp_hdcp.h" 48 #include "intel_dp_link_training.h" 49 #include "intel_dp_mst.h" 50 #include "intel_dp_test.h" 51 #include "intel_dp_tunnel.h" 52 #include "intel_dpio_phy.h" 53 #include "intel_hdcp.h" 54 #include "intel_hotplug.h" 55 #include "intel_link_bw.h" 56 #include "intel_pfit.h" 57 #include "intel_psr.h" 58 #include "intel_step.h" 59 #include "intel_vdsc.h" 60 #include "intel_vrr.h" 61 #include "skl_scaler.h" 62 63 /* 64 * DP MST (DisplayPort Multi-Stream Transport) 65 * 66 * MST support on the source depends on the platform and port. DP initialization 67 * sets up MST for each MST capable encoder. This will become the primary 68 * encoder for the port. 69 * 70 * MST initialization of each primary encoder creates MST stream encoders, one 71 * per pipe, and initializes the MST topology manager. The MST stream encoders 72 * are sometimes called "fake encoders", because they're virtual, not 73 * physical. Thus there are (number of MST capable ports) x (number of pipes) 74 * MST stream encoders in total. 75 * 76 * Decision to use MST for a sink happens at detect on the connector attached to 77 * the primary encoder, and this will not change while the sink is connected. We 78 * always use MST when possible, including for SST sinks with sideband messaging 79 * support. 
 *
 * The connectors for the MST streams are added and removed dynamically by the
 * topology manager. Their connection status is also determined by the topology
 * manager.
 *
 * On hardware, each transcoder may be associated with a single DDI
 * port. Multiple transcoders may be associated with the same DDI port only if
 * the port is in MST mode.
 *
 * On TGL+, all the transcoders streaming on the same DDI port will indicate a
 * primary transcoder; the TGL_DP_TP_CTL and TGL_DP_TP_STATUS registers are
 * relevant only on the primary transcoder. Prior to that, they are port
 * registers.
 */

/* From fake MST stream encoder to primary encoder */
static struct intel_encoder *to_primary_encoder(struct intel_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return &dig_port->base;
}

/* From fake MST stream encoder to primary DP */
static struct intel_dp *to_primary_dp(struct intel_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_digital_port *dig_port = intel_mst->primary;

	return &dig_port->dp;
}

/* Number of currently active streams on this MST link */
int intel_dp_mst_active_streams(struct intel_dp *intel_dp)
{
	return intel_dp->mst.active_streams;
}

/*
 * Decrement the active stream count. Returns true if this was the last
 * active stream on the link (or on an underflow, which is WARNed about).
 */
static bool intel_dp_mst_dec_active_streams(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	drm_dbg_kms(display->drm, "active MST streams %d -> %d\n",
		    intel_dp->mst.active_streams, intel_dp->mst.active_streams - 1);

	if (drm_WARN_ON(display->drm, intel_dp->mst.active_streams == 0))
		return true;

	return --intel_dp->mst.active_streams == 0;
}

/*
 * Increment the active stream count. Returns true if this was the first
 * active stream on the link.
 */
static bool intel_dp_mst_inc_active_streams(struct intel_dp *intel_dp)
{
	struct intel_display *display = to_intel_display(intel_dp);

	drm_dbg_kms(display->drm, "active MST streams %d -> %d\n",
		    intel_dp->mst.active_streams, intel_dp->mst.active_streams + 1);

	return intel_dp->mst.active_streams++ == 0;
}

/*
 * Maximum stream bpp imposed by the DSC->DPT interface width, or 0 if the
 * interface is not a bottleneck (non-UHBR, LNL+, or no DSC).
 */
/* TODO: return a bpp_x16 value */
static int intel_dp_mst_max_dpt_bpp(const struct intel_crtc_state *crtc_state,
				    bool dsc)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	if (!intel_dp_is_uhbr(crtc_state) || DISPLAY_VER(display) >= 20 || !dsc)
		return 0;

	/*
	 * DSC->DPT interface width:
	 *   ICL-MTL: 72 bits (each branch has 72 bits, only left branch is used)
	 *   LNL+:    144 bits (not a bottleneck in any config)
	 *
	 * Bspec/49259 suggests that the FEC overhead needs to be
	 * applied here, though HW people claim that neither this FEC
	 * or any other overhead is applicable here (that is the actual
	 * available_bw is just symbol_clock * 72). However based on
	 * testing on MTL-P the
	 * - DELL U3224KBA display
	 * - Unigraf UCD-500 CTS test sink
	 * devices the
	 * - 5120x2880/995.59Mhz
	 * - 6016x3384/1357.23Mhz
	 * - 6144x3456/1413.39Mhz
	 * modes (all the ones having a DPT limit on the above devices),
	 * both the channel coding efficiency and an additional 3%
	 * overhead needs to be accounted for.
	 */
	return div64_u64(mul_u32_u32(intel_dp_link_symbol_clock(crtc_state->port_clock) * 72,
				     drm_dp_bw_channel_coding_efficiency(true)),
			 mul_u32_u32(adjusted_mode->crtc_clock, 1030000));
}

/*
 * Link BW overhead for the stream via drm_dp_bw_overhead(), floored at the
 * fixed FEC overhead (see the TODO below).
 */
static int intel_dp_mst_bw_overhead(const struct intel_crtc_state *crtc_state,
				    bool ssc, int dsc_slice_count, int bpp_x16)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	unsigned long flags = DRM_DP_BW_OVERHEAD_MST;
	int overhead;

	flags |= intel_dp_is_uhbr(crtc_state) ? DRM_DP_BW_OVERHEAD_UHBR : 0;
	flags |= ssc ? DRM_DP_BW_OVERHEAD_SSC_REF_CLK : 0;
	flags |= crtc_state->fec_enable ? DRM_DP_BW_OVERHEAD_FEC : 0;

	if (dsc_slice_count)
		flags |= DRM_DP_BW_OVERHEAD_DSC;

	overhead = drm_dp_bw_overhead(crtc_state->lane_count,
				      adjusted_mode->hdisplay,
				      dsc_slice_count,
				      bpp_x16,
				      flags);

	/*
	 * TODO: clarify whether a minimum required by the fixed FEC overhead
	 * in the bspec audio programming sequence is required here.
	 */
	return max(overhead, intel_dp_bw_fec_overhead(crtc_state->fec_enable));
}

/* Compute the data/link M/N values and the TU size for the stream. */
static void intel_dp_mst_compute_m_n(const struct intel_crtc_state *crtc_state,
				     int overhead,
				     int bpp_x16,
				     struct intel_link_m_n *m_n)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;

	/* TODO: Check WA 14013163432 to set data M/N for full BW utilization. */
	intel_link_compute_m_n(bpp_x16, crtc_state->lane_count,
			       adjusted_mode->crtc_clock,
			       crtc_state->port_clock,
			       overhead,
			       m_n);

	m_n->tu = DIV_ROUND_UP_ULL(mul_u32_u32(m_n->data_m, 64), m_n->data_n);
}

/* PBN value for the given pixel rate / bpp, including the given BW overhead */
static int intel_dp_mst_calc_pbn(int pixel_clock, int bpp_x16, int bw_overhead)
{
	int effective_data_rate =
		intel_dp_effective_data_rate(pixel_clock, bpp_x16, bw_overhead);

	/*
	 * TODO: Use drm_dp_calc_pbn_mode() instead, once it's converted
	 * to calculate PBN with the BW overhead passed to it.
	 */
	return DIV_ROUND_UP(effective_data_rate * 64, 54 * 1000);
}

/* DSC slice count for the stream's mode, taking pipe joining into account */
static int intel_dp_mst_dsc_get_slice_count(const struct intel_connector *connector,
					    const struct intel_crtc_state *crtc_state)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	int num_joined_pipes = intel_crtc_num_joined_pipes(crtc_state);

	return intel_dp_dsc_get_slice_count(connector,
					    adjusted_mode->clock,
					    adjusted_mode->hdisplay,
					    num_joined_pipes);
}

/* Update the topology state's slot count based on the link's channel coding */
static void mst_stream_update_slots(const struct intel_crtc_state *crtc_state,
				    struct drm_dp_mst_topology_state *topology_state)
{
	u8 link_coding_cap = intel_dp_is_uhbr(crtc_state) ?
		DP_CAP_ANSI_128B132B : DP_CAP_ANSI_8B10B;

	drm_dp_mst_update_slots(topology_state, link_coding_cap);
}

/*
 * Find a link bpp in [min_bpp_x16, max_bpp_x16] (stepping down by
 * bpp_step_x16) for which the stream's time slot/TU allocation fits the
 * link, computing the M/N/TU values and - for MST - the atomic time slot
 * allocation as a side effect. Returns 0 on success or a negative error
 * code (-EDEADLK must be propagated to retry the atomic transaction).
 */
int intel_dp_mtp_tu_compute_config(struct intel_dp *intel_dp,
				   struct intel_crtc_state *crtc_state,
				   struct drm_connector_state *conn_state,
				   int min_bpp_x16, int max_bpp_x16, int bpp_step_x16, bool dsc)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct drm_atomic_state *state = crtc_state->uapi.state;
	struct drm_dp_mst_topology_state *mst_state = NULL;
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int bpp_x16, slots = -EINVAL;
	int dsc_slice_count = 0;
	int max_dpt_bpp_x16;

	/* shouldn't happen, sanity check: fractional bpps are DSC-only */
	drm_WARN_ON(display->drm, !dsc && (fxp_q4_to_frac(min_bpp_x16) ||
					   fxp_q4_to_frac(max_bpp_x16) ||
					   fxp_q4_to_frac(bpp_step_x16)));

	if (!bpp_step_x16) {
		/* Allow using zero step only to indicate single try for a given bpp. */
		drm_WARN_ON(display->drm, min_bpp_x16 != max_bpp_x16);
		bpp_step_x16 = 1;
	}

	if (is_mst) {
		mst_state = drm_atomic_get_mst_topology_state(state, &intel_dp->mst.mgr);
		if (IS_ERR(mst_state))
			return PTR_ERR(mst_state);

		mst_state->pbn_div = drm_dp_get_vc_payload_bw(crtc_state->port_clock,
							      crtc_state->lane_count);

		mst_stream_update_slots(crtc_state, mst_state);
	}

	/*
	 * NOTE: The following must reset crtc_state->fec_enable for UHBR/DSC
	 * after it was set by intel_dp_dsc_compute_config() ->
	 * intel_dp_needs_8b10b_fec().
	 */
	crtc_state->fec_enable = intel_dp_needs_8b10b_fec(crtc_state, dsc);
	/*
	 * If FEC gets enabled only because of another compressed stream, FEC
	 * may not be supported for this uncompressed stream on the whole link
	 * path until the sink DPRX. In this case a downstream branch device
	 * will disable FEC for the uncompressed stream as expected and so the
	 * FEC support doesn't need to be checked for this uncompressed stream.
	 */
	if (crtc_state->fec_enable && dsc &&
	    !intel_dp_supports_fec(intel_dp, connector, crtc_state))
		return -EINVAL;

	max_dpt_bpp_x16 = fxp_q4_from_int(intel_dp_mst_max_dpt_bpp(crtc_state, dsc));
	if (max_dpt_bpp_x16 && max_bpp_x16 > max_dpt_bpp_x16) {
		drm_dbg_kms(display->drm, "Limiting bpp to max DPT bpp (" FXP_Q4_FMT " -> " FXP_Q4_FMT ")\n",
			    FXP_Q4_ARGS(max_bpp_x16), FXP_Q4_ARGS(max_dpt_bpp_x16));
		max_bpp_x16 = max_dpt_bpp_x16;
	}

	drm_dbg_kms(display->drm, "Looking for slots in range min bpp " FXP_Q4_FMT " max bpp " FXP_Q4_FMT "\n",
		    FXP_Q4_ARGS(min_bpp_x16), FXP_Q4_ARGS(max_bpp_x16));

	if (dsc) {
		dsc_slice_count = intel_dp_mst_dsc_get_slice_count(connector, crtc_state);
		if (!dsc_slice_count) {
			drm_dbg_kms(display->drm, "Can't get valid DSC slice count\n");

			return -ENOSPC;
		}
	}

	drm_WARN_ON(display->drm, min_bpp_x16 % bpp_step_x16 || max_bpp_x16 % bpp_step_x16);

	/* Walk bpps from highest to lowest until an allocation fits. */
	for (bpp_x16 = max_bpp_x16; bpp_x16 >= min_bpp_x16; bpp_x16 -= bpp_step_x16) {
		int local_bw_overhead;
		int link_bpp_x16;

		drm_dbg_kms(display->drm, "Trying bpp " FXP_Q4_FMT "\n", FXP_Q4_ARGS(bpp_x16));

		if (dsc && !intel_dp_dsc_valid_compressed_bpp(intel_dp, bpp_x16)) {
			/* SST must have validated the single bpp tried here already earlier. */
			drm_WARN_ON(display->drm, !is_mst);
			continue;
		}

		link_bpp_x16 = dsc ? bpp_x16 :
			fxp_q4_from_int(intel_dp_output_bpp(crtc_state->output_format,
							    fxp_q4_to_int(bpp_x16)));

		local_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
							     false, dsc_slice_count, link_bpp_x16);

		intel_dp_mst_compute_m_n(crtc_state,
					 local_bw_overhead,
					 link_bpp_x16,
					 &crtc_state->dp_m_n);

		if (is_mst) {
			int remote_bw_overhead;
			int remote_tu;
			fixed20_12 pbn;

			remote_bw_overhead = intel_dp_mst_bw_overhead(crtc_state,
								      true, dsc_slice_count, link_bpp_x16);

			/*
			 * The TU size programmed to the HW determines which slots in
			 * an MTP frame are used for this stream, which needs to match
			 * the payload size programmed to the first downstream branch
			 * device's payload table.
			 *
			 * Note that atm the payload's PBN value DRM core sends via
			 * the ALLOCATE_PAYLOAD side-band message matches the payload
			 * size (which it calculates from the PBN value) it programs
			 * to the first branch device's payload table. The allocation
			 * in the payload table could be reduced though (to
			 * crtc_state->dp_m_n.tu), provided that the driver doesn't
			 * enable SSC on the corresponding link.
			 */
			pbn.full = dfixed_const(intel_dp_mst_calc_pbn(adjusted_mode->crtc_clock,
								      link_bpp_x16,
								      remote_bw_overhead));
			remote_tu = DIV_ROUND_UP(pbn.full, mst_state->pbn_div.full);

			/*
			 * Aligning the TUs ensures that symbols consisting of multiple
			 * (4) symbol cycles don't get split between two consecutive
			 * MTPs, as required by Bspec.
			 * TODO: remove the alignment restriction for 128b/132b links
			 * on some platforms, where Bspec allows this.
			 */
			remote_tu = ALIGN(remote_tu, 4 / crtc_state->lane_count);

			/*
			 * Also align PBNs accordingly, since MST core will derive its
			 * own copy of TU from the PBN in drm_dp_atomic_find_time_slots().
			 * The above comment about the difference between the PBN
			 * allocated for the whole path and the TUs allocated for the
			 * first branch device's link also applies here.
			 */
			pbn.full = remote_tu * mst_state->pbn_div.full;

			drm_WARN_ON(display->drm, remote_tu < crtc_state->dp_m_n.tu);
			crtc_state->dp_m_n.tu = remote_tu;

			slots = drm_dp_atomic_find_time_slots(state, &intel_dp->mst.mgr,
							      connector->mst.port,
							      dfixed_trunc(pbn));

			/* TODO: Check this already in drm_dp_atomic_find_time_slots(). */
			if (slots > mst_state->total_avail_slots)
				slots = -EINVAL;
		} else {
			/* Same as above for remote_tu */
			crtc_state->dp_m_n.tu = ALIGN(crtc_state->dp_m_n.tu,
						      4 / crtc_state->lane_count);

			if (crtc_state->dp_m_n.tu <= 64)
				slots = crtc_state->dp_m_n.tu;
			else
				slots = -EINVAL;
		}

		if (slots == -EDEADLK)
			return slots;

		if (slots >= 0) {
			drm_WARN_ON(display->drm, slots != crtc_state->dp_m_n.tu);

			break;
		}
	}

	if (slots < 0) {
		drm_dbg_kms(display->drm, "failed finding vcpi slots:%d\n",
			    slots);
		return slots;
	}

	if (!dsc)
		crtc_state->pipe_bpp = fxp_q4_to_int(bpp_x16);
	else
		crtc_state->dsc.compressed_bpp_x16 = bpp_x16;

	drm_dbg_kms(display->drm, "Got %d slots for pipe bpp " FXP_Q4_FMT " dsc %d\n",
		    slots, FXP_Q4_ARGS(bpp_x16), dsc);

	return 0;
}

/* Compute the non-DSC link config at the limits' max lane count/rate */
static int mst_stream_compute_link_config(struct intel_dp *intel_dp,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state,
					  const struct link_config_limits *limits)
{
	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	/*
	 * FIXME: allocate the BW according to link_bpp, which in the case of
	 * YUV420 is only half of the pipe bpp value.
	 */
	return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, conn_state,
					      limits->link.min_bpp_x16,
					      limits->link.max_bpp_x16,
					      fxp_q4_from_int(2 * 3), false);
}

/*
 * Compute the DSC link config: clamp the pipe bpp range to what the sink's
 * DSC decoder supports, derive the compressed bpp range and step, then look
 * for a fitting TU allocation.
 */
static int mst_stream_dsc_compute_link_config(struct intel_dp *intel_dp,
					      struct intel_crtc_state *crtc_state,
					      struct drm_connector_state *conn_state,
					      const struct link_config_limits *limits)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	int num_bpc;
	u8 dsc_bpc[3] = {};
	int min_bpp, max_bpp, sink_min_bpp, sink_max_bpp;
	int min_compressed_bpp_x16, max_compressed_bpp_x16;
	int bpp_step_x16;

	max_bpp = limits->pipe.max_bpp;
	min_bpp = limits->pipe.min_bpp;

	num_bpc = drm_dp_dsc_sink_supported_input_bpcs(connector->dp.dsc_dpcd,
						       dsc_bpc);

	drm_dbg_kms(display->drm, "DSC Source supported min bpp %d max bpp %d\n",
		    min_bpp, max_bpp);

	sink_min_bpp = min_array(dsc_bpc, num_bpc) * 3;
	sink_max_bpp = max_array(dsc_bpc, num_bpc) * 3;

	drm_dbg_kms(display->drm, "DSC Sink supported min bpp %d max bpp %d\n",
		    sink_min_bpp, sink_max_bpp);

	if (min_bpp < sink_min_bpp)
		min_bpp = sink_min_bpp;

	if (max_bpp > sink_max_bpp)
		max_bpp = sink_max_bpp;

	crtc_state->pipe_bpp = max_bpp;

	min_compressed_bpp_x16 = limits->link.min_bpp_x16;
	max_compressed_bpp_x16 = limits->link.max_bpp_x16;

	drm_dbg_kms(display->drm,
		    "DSC Sink supported compressed min bpp " FXP_Q4_FMT " compressed max bpp " FXP_Q4_FMT "\n",
		    FXP_Q4_ARGS(min_compressed_bpp_x16), FXP_Q4_ARGS(max_compressed_bpp_x16));

	bpp_step_x16 = intel_dp_dsc_bpp_step_x16(connector);

	/* Compressed bpp must stay at least one step below the pipe bpp. */
	max_compressed_bpp_x16 = min(max_compressed_bpp_x16, fxp_q4_from_int(crtc_state->pipe_bpp) - bpp_step_x16);

	drm_WARN_ON(display->drm, !is_power_of_2(bpp_step_x16));
	min_compressed_bpp_x16 = round_up(min_compressed_bpp_x16, bpp_step_x16);
	max_compressed_bpp_x16 = round_down(max_compressed_bpp_x16, bpp_step_x16);

	crtc_state->lane_count = limits->max_lane_count;
	crtc_state->port_clock = limits->max_rate;

	return intel_dp_mtp_tu_compute_config(intel_dp, crtc_state, conn_state,
					      min_compressed_bpp_x16,
					      max_compressed_bpp_x16,
					      bpp_step_x16, true);
}

/* Duration of the mode's horizontal blanking period in nanoseconds */
static int mode_hblank_period_ns(const struct drm_display_mode *mode)
{
	return DIV_ROUND_CLOSEST_ULL(mul_u32_u32(mode->htotal - mode->hdisplay,
						 NSEC_PER_SEC / 1000),
				     mode->crtc_clock);
}

/*
 * Whether the sink's hblank expansion quirk requires DSC for this mode:
 * only for quirky sinks with a short enough hblank period and a valid DSC
 * slice count.
 */
static bool
hblank_expansion_quirk_needs_dsc(const struct intel_connector *connector,
				 const struct intel_crtc_state *crtc_state,
				 const struct link_config_limits *limits)
{
	const struct drm_display_mode *adjusted_mode =
		&crtc_state->hw.adjusted_mode;
	bool is_uhbr_sink = connector->mst.dp &&
		drm_dp_128b132b_supported(connector->mst.dp->dpcd);
	int hblank_limit = is_uhbr_sink ? 500 : 300;

	if (!connector->dp.dsc_hblank_expansion_quirk)
		return false;

	if (is_uhbr_sink && !drm_dp_is_uhbr_rate(limits->max_rate))
		return false;

	if (mode_hblank_period_ns(adjusted_mode) > hblank_limit)
		return false;

	if (!intel_dp_mst_dsc_get_slice_count(connector, crtc_state))
		return false;

	return true;
}

/*
 * Adjust the link bpp limits for the DSC hblank expansion quirk. Returns
 * false if the current (DSC vs. non-DSC) configuration can't satisfy the
 * quirk and must be recomputed.
 */
static bool
adjust_limits_for_dsc_hblank_expansion_quirk(struct intel_dp *intel_dp,
					     const struct intel_connector *connector,
					     const struct intel_crtc_state *crtc_state,
					     struct link_config_limits *limits,
					     bool dsc)
{
	struct intel_display *display = to_intel_display(connector);
	const struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	int min_bpp_x16 = limits->link.min_bpp_x16;

	if (!hblank_expansion_quirk_needs_dsc(connector, crtc_state, limits))
		return true;

	if (!dsc) {
		/* Force a DSC recompute if the source supports it. */
		if (intel_dp_supports_dsc(intel_dp, connector, crtc_state)) {
			drm_dbg_kms(display->drm,
				    "[CRTC:%d:%s][CONNECTOR:%d:%s] DSC needed by hblank expansion quirk\n",
				    crtc->base.base.id, crtc->base.name,
				    connector->base.base.id, connector->base.name);
			return false;
		}

		drm_dbg_kms(display->drm,
			    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to 24 due to hblank expansion quirk\n",
			    crtc->base.base.id, crtc->base.name,
			    connector->base.base.id, connector->base.name);

		if (limits->link.max_bpp_x16 < fxp_q4_from_int(24))
			return false;

		limits->link.min_bpp_x16 = fxp_q4_from_int(24);

		return true;
	}

	drm_WARN_ON(display->drm, limits->min_rate != limits->max_rate);

	/* Link-rate dependent minimum compressed bpp for the quirk */
	if (limits->max_rate < 540000)
		min_bpp_x16 = fxp_q4_from_int(13);
	else if (limits->max_rate < 810000)
		min_bpp_x16 = fxp_q4_from_int(10);

	if (limits->link.min_bpp_x16 >= min_bpp_x16)
		return true;

	drm_dbg_kms(display->drm,
		    "[CRTC:%d:%s][CONNECTOR:%d:%s] Increasing link min bpp to " FXP_Q4_FMT " in DSC mode due to hblank expansion quirk\n",
		    crtc->base.base.id, crtc->base.name,
		    connector->base.base.id, connector->base.name,
		    FXP_Q4_ARGS(min_bpp_x16));

	if (limits->link.max_bpp_x16 < min_bpp_x16)
		return false;

	limits->link.min_bpp_x16 = min_bpp_x16;

	return true;
}

/*
 * Compute the stream's link config limits and apply the hblank expansion
 * quirk adjustment to them. Returns false if the limits are unusable.
 */
static bool
mst_stream_compute_config_limits(struct intel_dp *intel_dp,
				 struct drm_connector_state *conn_state,
				 struct intel_crtc_state *crtc_state,
				 bool dsc,
				 struct link_config_limits *limits)
{
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);

	if (!intel_dp_compute_config_limits(intel_dp, conn_state,
					    crtc_state, false, dsc,
					    limits))
		return false;

	return adjust_limits_for_dsc_hblank_expansion_quirk(intel_dp,
							    connector,
							    crtc_state,
							    limits,
							    dsc);
}

/* Encoder compute_config hook for MST streams */
static int mst_stream_compute_config(struct intel_encoder *encoder,
				     struct intel_crtc_state *pipe_config,
				     struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	const struct drm_display_mode *adjusted_mode =
		&pipe_config->hw.adjusted_mode;
	struct link_config_limits limits;
	bool dsc_needed, joiner_needs_dsc;
	int num_joined_pipes;
	int ret = 0;

	if (pipe_config->fec_enable &&
	    !intel_dp_supports_fec(intel_dp, connector, pipe_config))
		return -EINVAL;

	if (adjusted_mode->flags & DRM_MODE_FLAG_DBLSCAN)
		return -EINVAL;

	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     adjusted_mode->crtc_hdisplay,
						     adjusted_mode->crtc_clock);
	if (num_joined_pipes > 1)
		pipe_config->joiner_pipes = GENMASK(crtc->pipe + num_joined_pipes - 1, crtc->pipe);

	pipe_config->sink_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
	pipe_config->has_pch_encoder = false;

	joiner_needs_dsc = intel_dp_joiner_needs_dsc(display, num_joined_pipes);

	dsc_needed = joiner_needs_dsc || intel_dp->force_dsc_en ||
		!mst_stream_compute_config_limits(intel_dp, conn_state,
						  pipe_config, false, &limits);

	if (!dsc_needed) {
		ret = mst_stream_compute_link_config(intel_dp, pipe_config,
						     conn_state, &limits);

		if (ret == -EDEADLK)
			return ret;

		/* Fall back to DSC if the uncompressed config didn't fit. */
		if (ret)
			dsc_needed = true;
	}

	if (dsc_needed && !intel_dp_supports_dsc(intel_dp, connector, pipe_config)) {
		drm_dbg_kms(display->drm, "DSC required but not available\n");
		return -EINVAL;
	}

	/* enable compression if the mode doesn't fit available BW */
	if (dsc_needed) {
		drm_dbg_kms(display->drm, "Try DSC (fallback=%s, joiner=%s, force=%s)\n",
			    str_yes_no(ret), str_yes_no(joiner_needs_dsc),
			    str_yes_no(intel_dp->force_dsc_en));

		if (!mst_stream_compute_config_limits(intel_dp, conn_state,
						      pipe_config, true,
						      &limits))
			return -EINVAL;

		/*
		 * FIXME: As bpc is hardcoded to 8, as mentioned above,
		 * WARN and ignore the debug flag force_dsc_bpc for now.
		 */
		drm_WARN(display->drm, intel_dp->force_dsc_bpc,
			 "Cannot Force BPC for MST\n");
		/*
		 * Try to get at least some timeslots and then see, if
		 * we can fit there with DSC.
		 */
		drm_dbg_kms(display->drm, "Trying to find VCPI slots in DSC mode\n");

		ret = mst_stream_dsc_compute_link_config(intel_dp, pipe_config,
							 conn_state, &limits);
		if (ret < 0)
			return ret;

		ret = intel_dp_dsc_compute_config(intel_dp, pipe_config,
						  conn_state, &limits,
						  pipe_config->dp_m_n.tu);
	}

	if (ret)
		return ret;

	pipe_config->limited_color_range =
		intel_dp_limited_color_range(pipe_config, conn_state);

	if (display->platform.geminilake || display->platform.broxton)
		pipe_config->lane_lat_optim_mask =
			bxt_dpio_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	ret = intel_dp_compute_min_hblank(pipe_config, conn_state);
	if (ret)
		return ret;

	intel_vrr_compute_config(pipe_config, conn_state);

	intel_dp_audio_compute_config(encoder, pipe_config, conn_state);

	intel_ddi_compute_min_voltage_level(pipe_config);

	intel_psr_compute_config(intel_dp, pipe_config, conn_state);

	return intel_dp_tunnel_atomic_compute_stream_bw(state, intel_dp, connector,
							pipe_config);
}

/*
 * Iterate over all connectors and return a mask of
 * all CPU transcoders streaming over the same DP link.
 */
static unsigned int
intel_dp_mst_transcoder_mask(struct intel_atomic_state *state,
			     struct intel_dp *mst_port)
{
	struct intel_display *display = to_intel_display(state);
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 transcoders = 0;
	int i;

	/* Only relevant on TGL+ (see the primary transcoder comment above) */
	if (DISPLAY_VER(display) < 12)
		return 0;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		const struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		if (connector->mst.dp != mst_port || !conn_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_state->base.crtc);
		crtc_state = intel_atomic_get_new_crtc_state(state, crtc);

		if (!crtc_state->hw.active)
			continue;

		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

/*
 * Mask of pipes in @state whose connector is @parent_port or a port
 * downstream of it in the given MST topology. A NULL @parent_port matches
 * all ports in the topology.
 */
static u8 get_pipes_downstream_of_mst_port(struct intel_atomic_state *state,
					   struct drm_dp_mst_topology_mgr *mst_mgr,
					   struct drm_dp_mst_port *parent_port)
{
	const struct intel_digital_connector_state *conn_state;
	struct intel_connector *connector;
	u8 mask = 0;
	int i;

	for_each_new_intel_connector_in_state(state, connector, conn_state, i) {
		if (!conn_state->base.crtc)
			continue;

		if (&connector->mst.dp->mst.mgr != mst_mgr)
			continue;

		if (connector->mst.port != parent_port &&
		    !drm_dp_mst_port_downstream_of_parent(mst_mgr,
							  connector->mst.port,
							  parent_port))
			continue;

		mask |= BIT(to_intel_crtc(conn_state->base.crtc)->pipe);
	}

	return mask;
}

/*
 * If only some of the streams on an MST link are compressed, force a
 * modeset on all of them so the DSC state on the link is consistent.
 * Returns -EAGAIN after updating @limits so the configs get recomputed.
 */
static int intel_dp_mst_check_dsc_change(struct intel_atomic_state *state,
					 struct drm_dp_mst_topology_mgr *mst_mgr,
					 struct intel_link_bw_limits *limits)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_crtc *crtc;
	u8 mst_pipe_mask;
	u8 dsc_pipe_mask = 0;
	int ret;

	mst_pipe_mask = get_pipes_downstream_of_mst_port(state, mst_mgr, NULL);

	for_each_intel_crtc_in_pipe_mask(display->drm, crtc, mst_pipe_mask) {
		struct intel_crtc_state *crtc_state =
			intel_atomic_get_new_crtc_state(state, crtc);

		/* Atomic connector check should've added all the MST CRTCs. */
		if (drm_WARN_ON(display->drm, !crtc_state))
			return -EINVAL;

		if (intel_dsc_enabled_on_link(crtc_state))
			dsc_pipe_mask |= BIT(crtc->pipe);
	}

	/* All-or-nothing DSC on the link is consistent, nothing to do. */
	if (!dsc_pipe_mask || mst_pipe_mask == dsc_pipe_mask)
		return 0;

	limits->link_dsc_pipes |= mst_pipe_mask;

	ret = intel_modeset_pipes_in_mask_early(state, "MST DSC",
						mst_pipe_mask);

	return ret ? : -EAGAIN;
}

/*
 * Check the topology's BW via the DRM MST core; on -ENOSPC reduce the bpp
 * of the pipes downstream of the port that ran out of BW and return -EAGAIN
 * so the configs get recomputed with the reduced @limits.
 */
static int intel_dp_mst_check_bw(struct intel_atomic_state *state,
				 struct drm_dp_mst_topology_mgr *mst_mgr,
				 struct drm_dp_mst_topology_state *mst_state,
				 struct intel_link_bw_limits *limits)
{
	struct drm_dp_mst_port *mst_port;
	u8 mst_port_pipes;
	int ret;

	ret = drm_dp_mst_atomic_check_mgr(&state->base, mst_mgr, mst_state, &mst_port);
	if (ret != -ENOSPC)
		return ret;

	mst_port_pipes = get_pipes_downstream_of_mst_port(state, mst_mgr, mst_port);

	ret = intel_link_bw_reduce_bpp(state, limits,
				       mst_port_pipes, "MST link BW");

	return ret ? : -EAGAIN;
}

/**
 * intel_dp_mst_atomic_check_link - check all modeset MST link configuration
 * @state: intel atomic state
 * @limits: link BW limits
 *
 * Check the link configuration for all modeset MST outputs. If the
 * configuration is invalid @limits will be updated if possible to
 * reduce the total BW, after which the configuration for all CRTCs in
 * @state must be recomputed with the updated @limits.
 *
 * Returns:
 * - 0 if the configuration is valid
 * - %-EAGAIN, if the configuration is invalid and @limits got updated
 *   with fallback values with which the configuration of all CRTCs in
 *   @state must be recomputed
 * - Other negative error, if the configuration is invalid without a
 *   fallback possibility, or the check failed for another reason
 */
int intel_dp_mst_atomic_check_link(struct intel_atomic_state *state,
				   struct intel_link_bw_limits *limits)
{
	struct drm_dp_mst_topology_mgr *mgr;
	struct drm_dp_mst_topology_state *mst_state;
	int ret;
	int i;

	for_each_new_mst_mgr_in_state(&state->base, mgr, mst_state, i) {
		ret = intel_dp_mst_check_dsc_change(state, mgr, limits);
		if (ret)
			return ret;

		ret = intel_dp_mst_check_bw(state, mgr, mst_state,
					    limits);
		if (ret)
			return ret;
	}

	return 0;
}

/* Encoder compute_config_late hook: pick the stream's master transcoder */
static int mst_stream_compute_config_late(struct intel_encoder *encoder,
					  struct intel_crtc_state *crtc_state,
					  struct drm_connector_state *conn_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(conn_state->state);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	/* lowest numbered transcoder will be designated master */
	crtc_state->mst_master_transcoder =
		ffs(intel_dp_mst_transcoder_mask(state, intel_dp)) - 1;

	return 0;
}

/*
 * If one of the connectors in a MST stream needs a modeset, mark all CRTCs
 * that shares the same MST stream as mode changed,
 * intel_modeset_pipe_config()+intel_crtc_check_fastset() will take care to do
 * a fastset when possible.
 *
 * On TGL+ this is required since each stream go through a master transcoder,
 * so if the master transcoder needs modeset, all other streams in the
 * topology need a modeset.
All platforms need to add the atomic state
 * for all streams in the topology, since a modeset on one may require
 * changing the MST link BW usage of the others, which in turn needs a
 * recomputation of the corresponding CRTC states.
 */
static int
mst_connector_atomic_topology_check(struct intel_connector *connector,
				    struct intel_atomic_state *state)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_connector_list_iter connector_list_iter;
	struct intel_connector *connector_iter;
	int ret = 0;

	if (!intel_connector_needs_modeset(state, &connector->base))
		return 0;

	drm_connector_list_iter_begin(display->drm, &connector_list_iter);
	for_each_intel_connector_iter(connector_iter, &connector_list_iter) {
		struct intel_digital_connector_state *conn_iter_state;
		struct intel_crtc_state *crtc_state;
		struct intel_crtc *crtc;

		/* Only other connectors on the same MST link are affected. */
		if (connector_iter->mst.dp != connector->mst.dp ||
		    connector_iter == connector)
			continue;

		conn_iter_state = intel_atomic_get_digital_connector_state(state,
									   connector_iter);
		if (IS_ERR(conn_iter_state)) {
			ret = PTR_ERR(conn_iter_state);
			break;
		}

		if (!conn_iter_state->base.crtc)
			continue;

		crtc = to_intel_crtc(conn_iter_state->base.crtc);
		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
		if (IS_ERR(crtc_state)) {
			ret = PTR_ERR(crtc_state);
			break;
		}

		ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
		if (ret)
			break;
		crtc_state->uapi.mode_changed = true;
	}
	drm_connector_list_iter_end(&connector_list_iter);

	return ret;
}

/* Atomic check hook for MST connectors. */
static int
mst_connector_atomic_check(struct drm_connector *_connector,
			   struct drm_atomic_state *_state)
{
	struct intel_atomic_state *state = to_intel_atomic_state(_state);
	struct intel_connector *connector = to_intel_connector(_connector);
	int ret;

	ret = intel_digital_connector_atomic_check(&connector->base, &state->base);
	if (ret)
		return ret;

	ret = mst_connector_atomic_topology_check(connector, state);
	if (ret)
		return ret;

	if (intel_connector_needs_modeset(state, &connector->base)) {
		ret = intel_dp_tunnel_atomic_check_state(state,
							 connector->mst.dp,
							 connector);
		if (ret)
			return ret;
	}

	/* Release this stream's time slots if the connector is disabled. */
	return drm_dp_atomic_release_time_slots(&state->base,
						&connector->mst.dp->mst.mgr,
						connector->mst.port);
}

static void mst_stream_disable(struct intel_atomic_state *state,
			       struct intel_encoder *encoder,
			       const struct intel_crtc_state *old_crtc_state,
			       const struct drm_connector_state *old_conn_state)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);

	/* The link goes inactive once the last active stream is disabled. */
	if (intel_dp_mst_active_streams(intel_dp) == 1)
		intel_dp->link.active = false;

	intel_hdcp_disable(intel_mst->connector);

	intel_dp_sink_disable_decompression(state, connector, old_crtc_state);
}

static void mst_stream_post_disable(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *old_crtc_state,
				    const struct drm_connector_state *old_conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(old_conn_state->connector);
	struct drm_dp_mst_topology_state *old_mst_state =
		drm_atomic_get_old_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	struct drm_dp_mst_topology_state *new_mst_state =
drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	const struct drm_dp_mst_atomic_payload *old_payload =
		drm_atomic_get_mst_payload_state(old_mst_state, connector->mst.port);
	struct drm_dp_mst_atomic_payload *new_payload =
		drm_atomic_get_mst_payload_state(new_mst_state, connector->mst.port);
	struct intel_crtc *pipe_crtc;
	bool last_mst_stream;
	int i;

	last_mst_stream = intel_dp_mst_dec_active_streams(intel_dp);

	/* On TGL+ the master transcoder's stream must be torn down last. */
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && last_mst_stream &&
		    !intel_dp_mst_is_master_trans(old_crtc_state));

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_off(old_pipe_crtc_state);
	}

	intel_disable_transcoder(old_crtc_state);

	/* Deallocate this stream's payload slots (part 1, source side). */
	drm_dp_remove_payload_part1(&intel_dp->mst.mgr, new_mst_state, new_payload);

	intel_ddi_clear_act_sent(encoder, old_crtc_state);

	intel_de_rmw(display,
		     TRANS_DDI_FUNC_CTL(display, old_crtc_state->cpu_transcoder),
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC, 0);

	intel_ddi_wait_for_act_sent(encoder, old_crtc_state);
	drm_dp_check_act_status(&intel_dp->mst.mgr);

	/* Finish the payload removal after ACT has been seen (part 2). */
	drm_dp_remove_payload_part2(&intel_dp->mst.mgr, new_mst_state,
				    old_payload, new_payload);

	intel_vrr_transcoder_disable(old_crtc_state);

	intel_ddi_disable_transcoder_func(old_crtc_state);

	for_each_pipe_crtc_modeset_disable(display, pipe_crtc, old_crtc_state, i) {
		const struct intel_crtc_state *old_pipe_crtc_state =
			intel_atomic_get_old_crtc_state(state, pipe_crtc);

		intel_dsc_disable(old_pipe_crtc_state);

		if (DISPLAY_VER(display) >= 9)
			skl_scaler_disable(old_pipe_crtc_state);
		else
			ilk_pfit_disable(old_pipe_crtc_state);
	}

	/*
	 * Power down mst path before disabling the port, otherwise we end
	 * up getting interrupts from the sink upon detecting link loss.
	 */
	drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->mst.port,
				     false);

	/*
	 * BSpec 4287: disable DIP after the transcoder is disabled and before
	 * the transcoder clock select is set to none.
	 */
	intel_dp_set_infoframes(primary_encoder, false, old_crtc_state, NULL);
	/*
	 * From TGL spec: "If multi-stream slave transcoder: Configure
	 * Transcoder Clock Select to direct no clock to the transcoder"
	 *
	 * From older GENs spec: "Configure Transcoder Clock Select to direct
	 * no clock to the transcoder"
	 */
	if (DISPLAY_VER(display) < 12 || !last_mst_stream)
		intel_ddi_disable_transcoder_clock(old_crtc_state);

	intel_mst->connector = NULL;
	/* The primary encoder is torn down only with the last stream. */
	if (last_mst_stream)
		primary_encoder->post_disable(state, primary_encoder,
					      old_crtc_state, NULL);
}

static void mst_stream_post_pll_disable(struct intel_atomic_state *state,
					struct intel_encoder *encoder,
					const struct intel_crtc_state *old_crtc_state,
					const struct drm_connector_state *old_conn_state)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	/* Tear down the port PLL only after the last stream is gone. */
	if (intel_dp_mst_active_streams(intel_dp) == 0 &&
	    primary_encoder->post_pll_disable)
		primary_encoder->post_pll_disable(state, primary_encoder, old_crtc_state, old_conn_state);
}

static void mst_stream_pre_pll_enable(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *pipe_config,
				      const struct drm_connector_state *conn_state)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);

	if (intel_dp_mst_active_streams(intel_dp) == 0)
primary_encoder->pre_pll_enable(state, primary_encoder,
						pipe_config, NULL);
	else
		/*
		 * The port PLL state needs to get updated for secondary
		 * streams as for the primary stream.
		 */
		intel_ddi_update_active_dpll(state, primary_encoder,
					     to_intel_crtc(pipe_config->uapi.crtc));
}

/* Do the last probed MST link parameters match the given link config? */
static bool intel_mst_probed_link_params_valid(struct intel_dp *intel_dp,
					       int link_rate, int lane_count)
{
	return intel_dp->link.mst_probed_rate == link_rate &&
		intel_dp->link.mst_probed_lane_count == lane_count;
}

/* Remember the link parameters the topology was last probed with. */
static void intel_mst_set_probed_link_params(struct intel_dp *intel_dp,
					     int link_rate, int lane_count)
{
	intel_dp->link.mst_probed_rate = link_rate;
	intel_dp->link.mst_probed_lane_count = lane_count;
}

/*
 * Queue a topology reprobe if the link parameters changed since the last
 * probe, recording the new parameters.
 */
static void intel_mst_reprobe_topology(struct intel_dp *intel_dp,
				       const struct intel_crtc_state *crtc_state)
{
	if (intel_mst_probed_link_params_valid(intel_dp,
					       crtc_state->port_clock, crtc_state->lane_count))
		return;

	drm_dp_mst_topology_queue_probe(&intel_dp->mst.mgr);

	intel_mst_set_probed_link_params(intel_dp,
					 crtc_state->port_clock, crtc_state->lane_count);
}

static void mst_stream_pre_enable(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *pipe_config,
				  const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(state);
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	int ret;
	bool first_mst_stream;

	/* MST encoders are bound to a crtc, not to a connector,
	 * force the mapping here for get_hw_state.
	 */
	connector->encoder = encoder;
	intel_mst->connector = connector;

	first_mst_stream = intel_dp_mst_inc_active_streams(intel_dp);
	/* On TGL+ the first enabled stream must use the master transcoder. */
	drm_WARN_ON(display->drm, DISPLAY_VER(display) >= 12 && first_mst_stream &&
		    !intel_dp_mst_is_master_trans(pipe_config));

	if (first_mst_stream)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);

	drm_dp_send_power_updown_phy(&intel_dp->mst.mgr, connector->mst.port, true);

	intel_dp_sink_enable_decompression(state, connector, pipe_config);

	if (first_mst_stream) {
		primary_encoder->pre_enable(state, primary_encoder,
					    pipe_config, NULL);

		intel_mst_reprobe_topology(intel_dp, pipe_config);
	}

	ret = drm_dp_add_payload_part1(&intel_dp->mst.mgr, mst_state,
				       drm_atomic_get_mst_payload_state(mst_state, connector->mst.port));
	if (ret < 0)
		intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);

	/*
	 * Before Gen 12 this is not done as part of
	 * primary_encoder->pre_enable() and should be done here. For
	 * Gen 12+ the step in which this should be done is different for the
	 * first MST stream, so it's done on the DDI for the first stream and
	 * here for the following ones.
	 */
	if (DISPLAY_VER(display) < 12 || !first_mst_stream)
		intel_ddi_enable_transcoder_clock(encoder, pipe_config);

	if (DISPLAY_VER(display) >= 13 && !first_mst_stream)
		intel_ddi_config_transcoder_func(encoder, pipe_config);

	intel_dsc_dp_pps_write(primary_encoder, pipe_config);
	intel_ddi_set_dp_msa(pipe_config, conn_state);
}

/* Apply the ADL-P BS jitter related workarounds to CHICKEN_MISC_3. */
static void enable_bs_jitter_was(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	u32 clear = 0;
	u32 set = 0;

	if (!display->platform.alderlake_p)
		return;

	if (!IS_DISPLAY_STEP(display, STEP_D0, STEP_FOREVER))
		return;

	/* Wa_14013163432:adlp */
	if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
		set |= DP_MST_FEC_BS_JITTER_WA(crtc_state->cpu_transcoder);

	/* Wa_14014143976:adlp */
	if (IS_DISPLAY_STEP(display, STEP_E0, STEP_FOREVER)) {
		if (intel_dp_is_uhbr(crtc_state))
			set |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);
		else if (crtc_state->fec_enable)
			clear |= DP_MST_SHORT_HBLANK_WA(crtc_state->cpu_transcoder);

		if (crtc_state->fec_enable || intel_dp_is_uhbr(crtc_state))
			set |= DP_MST_DPT_DPTP_ALIGN_WA(crtc_state->cpu_transcoder);
	}

	if (!clear && !set)
		return;

	intel_de_rmw(display, CHICKEN_MISC_3, clear, set);
}

static void mst_stream_enable(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config,
			      const struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(encoder);
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);
	struct intel_dp *intel_dp = to_primary_dp(encoder);
	struct intel_connector *connector = to_intel_connector(conn_state->connector);
	struct drm_dp_mst_topology_state *mst_state =
		drm_atomic_get_new_mst_topology_state(&state->base, &intel_dp->mst.mgr);
	enum transcoder trans = pipe_config->cpu_transcoder;
	bool first_mst_stream = intel_dp_mst_active_streams(intel_dp) == 1;
	struct intel_crtc *pipe_crtc;
	int ret, i;

	drm_WARN_ON(display->drm, pipe_config->has_pch_encoder);

	if (intel_dp_is_uhbr(pipe_config)) {
		const struct drm_display_mode *adjusted_mode =
			&pipe_config->hw.adjusted_mode;
		u64 crtc_clock_hz = KHz(adjusted_mode->crtc_clock);

		/* For 128b/132b (UHBR) program the pixel clock in Hz, split
		 * across the VFREQHIGH/VFREQLOW register pair. */
		intel_de_write(display, TRANS_DP2_VFREQHIGH(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz >> 24));
		intel_de_write(display, TRANS_DP2_VFREQLOW(pipe_config->cpu_transcoder),
			       TRANS_DP2_VFREQ_PIXEL_CLOCK(crtc_clock_hz & 0xffffff));
	}

	enable_bs_jitter_was(pipe_config);

	intel_ddi_enable_transcoder_func(encoder, pipe_config);

	intel_vrr_transcoder_enable(pipe_config);

	intel_ddi_clear_act_sent(encoder, pipe_config);

	intel_de_rmw(display, TRANS_DDI_FUNC_CTL(display, trans), 0,
		     TRANS_DDI_DP_VC_PAYLOAD_ALLOC);

	intel_ddi_wait_for_act_sent(encoder, pipe_config);
	drm_dp_check_act_status(&intel_dp->mst.mgr);

	if (first_mst_stream)
		intel_ddi_wait_for_fec_status(encoder, pipe_config, true);

	ret = drm_dp_add_payload_part2(&intel_dp->mst.mgr,
				       drm_atomic_get_mst_payload_state(mst_state,
									connector->mst.port));
	if (ret < 0)
		intel_dp_queue_modeset_retry_for_link(state, primary_encoder, pipe_config);

	if (DISPLAY_VER(display) >= 12)
		intel_de_rmw(display, CHICKEN_TRANS(display, trans),
			     FECSTALL_DIS_DPTSTREAM_DPTTG,
			     pipe_config->fec_enable ? FECSTALL_DIS_DPTSTREAM_DPTTG : 0);

	intel_enable_transcoder(pipe_config);

	for_each_pipe_crtc_modeset_enable(display, pipe_crtc, pipe_config, i) {
		const struct intel_crtc_state *pipe_crtc_state =
			intel_atomic_get_new_crtc_state(state, pipe_crtc);

		intel_crtc_vblank_on(pipe_crtc_state);
	}

	intel_hdcp_enable(state, encoder, pipe_config, conn_state);
}

/* A stream encoder counts as enabled if it has a connector bound to it. */
static bool mst_stream_get_hw_state(struct intel_encoder *encoder,
				    enum pipe *pipe)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(encoder);
	*pipe = intel_mst->pipe;
	if (intel_mst->connector)
		return true;
	return false;
}

/* State readout is delegated to the primary (DDI) encoder. */
static void mst_stream_get_config(struct intel_encoder *encoder,
				  struct intel_crtc_state *pipe_config)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);

	primary_encoder->get_config(primary_encoder, pipe_config);
}

static bool mst_stream_initial_fastset_check(struct intel_encoder *encoder,
					     struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *primary_encoder = to_primary_encoder(encoder);

	return intel_dp_initial_fastset_check(primary_encoder, crtc_state);
}

static int mst_connector_get_ddc_modes(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = connector->mst.dp;
	const struct drm_edid *drm_edid;
	int ret;

	/* An unregistered connector gets its mode list cleared. */
	if (drm_connector_is_unregistered(&connector->base))
		return intel_connector_update_modes(&connector->base, NULL);

	/* Without HW access, reuse the previously cached EDID modes. */
	if (!intel_display_driver_check_access(display))
		return drm_edid_connector_add_modes(&connector->base);

	drm_edid = drm_dp_mst_edid_read(&connector->base, &intel_dp->mst.mgr, connector->mst.port);

	ret = intel_connector_update_modes(&connector->base, drm_edid);

	drm_edid_free(drm_edid);

	return ret;
}

static int
mst_connector_late_register(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	int ret;

	ret = drm_dp_mst_connector_late_register(&connector->base, connector->mst.port);
	if (ret < 0)
		return ret;

	ret = intel_connector_register(&connector->base);
	if (ret < 0)
		drm_dp_mst_connector_early_unregister(&connector->base, connector->mst.port);

	return ret;
}

static void
mst_connector_early_unregister(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);

	intel_connector_unregister(&connector->base);
	drm_dp_mst_connector_early_unregister(&connector->base, connector->mst.port);
}

static const struct drm_connector_funcs mst_connector_funcs = {
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.late_register = mst_connector_late_register,
	.early_unregister = mst_connector_early_unregister,
	.destroy = intel_connector_destroy,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};

static int mst_connector_get_modes(struct drm_connector *_connector)
{
	struct intel_connector *connector = to_intel_connector(_connector);

	return mst_connector_get_ddc_modes(&connector->base);
}

static int
mst_connector_mode_valid_ctx(struct drm_connector *_connector,
			     const struct drm_display_mode *mode,
			     struct drm_modeset_acquire_ctx *ctx,
			     enum drm_mode_status *status)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = connector->mst.dp;
	struct drm_dp_mst_topology_mgr *mgr = &intel_dp->mst.mgr;
	struct drm_dp_mst_port *port = connector->mst.port;
	const int min_bpp = 18;
	int max_dotclk = display->cdclk.max_dotclk_freq;
	int max_rate, mode_rate, max_lanes, max_link_clock;
	int ret;
	bool dsc = false;
	u16 dsc_max_compressed_bpp = 0;
	u8 dsc_slice_count = 0;
	int target_clock = mode->clock;
	int num_joined_pipes;

	if (drm_connector_is_unregistered(&connector->base)) {
		*status = MODE_ERROR;
		return 0;
	}

	*status = intel_cpu_transcoder_mode_valid(display, mode);
	if (*status != MODE_OK)
		return 0;

	if (mode->flags & DRM_MODE_FLAG_DBLCLK) {
		*status = MODE_H_ILLEGAL;
		return 0;
	}

	if (mode->clock < 10000) {
		*status = MODE_CLOCK_LOW;
		return 0;
	}

	max_link_clock = intel_dp_max_link_rate(intel_dp);
	max_lanes = intel_dp_max_lane_count(intel_dp);

	max_rate = intel_dp_max_link_data_rate(intel_dp,
					       max_link_clock, max_lanes);
	mode_rate = intel_dp_link_required(mode->clock, min_bpp);

	/*
	 * TODO:
	 * - Also check if compression would allow for the mode
	 * - Calculate the overhead using drm_dp_bw_overhead() /
	 *   drm_dp_bw_channel_coding_efficiency(), similarly to the
	 *   compute config code, as drm_dp_calc_pbn_mode() doesn't
	 *   account with all the overheads.
	 * - Check here and during compute config the BW reported by
	 *   DFP_Link_Available_Payload_Bandwidth_Number (or the
	 *   corresponding link capabilities of the sink) in case the
	 *   stream is uncompressed for it by the last branch device.
	 */
	num_joined_pipes = intel_dp_num_joined_pipes(intel_dp, connector,
						     mode->hdisplay, target_clock);
	max_dotclk *= num_joined_pipes;

	ret = drm_modeset_lock(&mgr->base.lock, ctx);
	if (ret)
		return ret;

	if (mode_rate > max_rate || mode->clock > max_dotclk ||
	    drm_dp_calc_pbn_mode(mode->clock, min_bpp << 4) > port->full_pbn) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (intel_dp_has_dsc(connector)) {
		/*
		 * TBD pass the connector BPC,
		 * for now U8_MAX so that max BPC on that platform would be picked
		 */
		int pipe_bpp = intel_dp_dsc_compute_max_bpp(connector, U8_MAX);

		/* DSC on the link requires FEC support on the sink. */
		if (drm_dp_sink_supports_fec(connector->dp.fec_capability)) {
			dsc_max_compressed_bpp =
				intel_dp_dsc_get_max_compressed_bpp(display,
								    max_link_clock,
								    max_lanes,
								    target_clock,
								    mode->hdisplay,
								    num_joined_pipes,
								    INTEL_OUTPUT_FORMAT_RGB,
								    pipe_bpp, 64);
			dsc_slice_count =
				intel_dp_dsc_get_slice_count(connector,
							     target_clock,
							     mode->hdisplay,
							     num_joined_pipes);
		}

		dsc = dsc_max_compressed_bpp && dsc_slice_count;
	}

	if (intel_dp_joiner_needs_dsc(display, num_joined_pipes) && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	if (mode_rate > max_rate && !dsc) {
		*status = MODE_CLOCK_HIGH;
		return 0;
	}

	*status = intel_mode_valid_max_plane_size(display, mode, num_joined_pipes);
	return 0;
}

/* Route the connector to the stream encoder of the pipe assigned to it. */
static struct drm_encoder *
mst_connector_atomic_best_encoder(struct drm_connector *_connector,
				  struct drm_atomic_state *state)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct drm_connector_state *connector_state =
		drm_atomic_get_new_connector_state(state, &connector->base);
	struct intel_dp *intel_dp = connector->mst.dp;
	struct intel_crtc *crtc = to_intel_crtc(connector_state->crtc);

	return &intel_dp->mst.stream_encoders[crtc->pipe]->base.base;
}

static int
mst_connector_detect_ctx(struct drm_connector *_connector,
			 struct drm_modeset_acquire_ctx *ctx, bool force)
{
	struct intel_connector *connector = to_intel_connector(_connector);
	struct intel_display *display = to_intel_display(connector);
	struct intel_dp *intel_dp = connector->mst.dp;

	if (!intel_display_device_enabled(display))
		return connector_status_disconnected;

	if (drm_connector_is_unregistered(&connector->base))
		return connector_status_disconnected;

	/* Without HW access, report the last known status. */
	if (!intel_display_driver_check_access(display))
		return connector->base.status;

	intel_dp_flush_connector_commits(connector);

	return drm_dp_mst_detect_port(&connector->base, ctx, &intel_dp->mst.mgr,
				      connector->mst.port);
}

static const struct drm_connector_helper_funcs mst_connector_helper_funcs = {
	.get_modes = mst_connector_get_modes,
	.mode_valid_ctx = mst_connector_mode_valid_ctx,
	.atomic_best_encoder = mst_connector_atomic_best_encoder,
	.atomic_check = mst_connector_atomic_check,
	.detect_ctx = mst_connector_detect_ctx,
};

static void mst_stream_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_dp_mst_encoder *intel_mst = enc_to_mst(to_intel_encoder(encoder));

	drm_encoder_cleanup(encoder);
	kfree(intel_mst);
}

static const struct drm_encoder_funcs mst_stream_encoder_funcs = {
	.destroy = mst_stream_encoder_destroy,
};

static bool mst_connector_get_hw_state(struct intel_connector *connector)
{
	/* This is the MST stream encoder set in ->pre_enable, if any */
	struct intel_encoder *encoder = intel_attached_encoder(connector);
	enum pipe pipe;

	if (!encoder || !connector->base.state->crtc)
		return false;

	return encoder->get_hw_state(encoder, &pipe);
}

static int
mst_topology_add_connector_properties(struct intel_dp *intel_dp,
				      struct drm_connector *_connector,
				      const char *pathprop)
{
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_connector *connector = to_intel_connector(_connector);

	drm_object_attach_property(&connector->base.base,
				   display->drm->mode_config.path_property, 0);
	drm_object_attach_property(&connector->base.base,
				   display->drm->mode_config.tile_property, 0);

	intel_attach_force_audio_property(&connector->base);
	intel_attach_broadcast_rgb_property(&connector->base);

	/*
	 * Reuse the prop from the SST connector because we're
	 * not allowed to create new props after device registration.
	 */
	connector->base.max_bpc_property =
		intel_dp->attached_connector->base.max_bpc_property;
	if (connector->base.max_bpc_property)
		drm_connector_attach_max_bpc_property(&connector->base, 6, 12);

	return drm_connector_set_path_property(&connector->base, pathprop);
}

/* Read the DSC capabilities of the sink behind the decompression AUX. */
static void
intel_dp_mst_read_decompression_port_dsc_caps(struct intel_dp *intel_dp,
					      struct intel_connector *connector)
{
	u8 dpcd_caps[DP_RECEIVER_CAP_SIZE];
	struct drm_dp_desc desc;

	if (!connector->dp.dsc_decompression_aux)
		return;

	if (drm_dp_read_dpcd_caps(connector->dp.dsc_decompression_aux, dpcd_caps) < 0)
		return;

	if (drm_dp_read_desc(connector->dp.dsc_decompression_aux, &desc,
			     drm_dp_is_branch(dpcd_caps)) < 0)
		return;

	intel_dp_get_dsc_sink_cap(dpcd_caps[DP_DPCD_REV],
				  &desc, drm_dp_is_branch(dpcd_caps),
				  connector);
}

static bool detect_dsc_hblank_expansion_quirk(const struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	struct drm_dp_aux *aux = connector->dp.dsc_decompression_aux;
	struct drm_dp_desc desc;
	u8 dpcd[DP_RECEIVER_CAP_SIZE];

	if (!aux)
		return false;

	/*
	 * A logical port's OUI (at least for affected sinks) is all 0, so
	 * instead of that the parent port's OUI is used for identification.
	 */
	if (drm_dp_mst_port_is_logical(connector->mst.port)) {
		aux = drm_dp_mst_aux_for_parent(connector->mst.port);
		if (!aux)
			aux = &connector->mst.dp->aux;
	}

	if (drm_dp_read_dpcd_caps(aux, dpcd) < 0)
		return false;

	if (drm_dp_read_desc(aux, &desc, drm_dp_is_branch(dpcd)) < 0)
		return false;

	if (!drm_dp_has_quirk(&desc,
			      DP_DPCD_QUIRK_HBLANK_EXPANSION_REQUIRES_DSC))
		return false;

	/*
	 * UHBR (MST sink) devices requiring this quirk don't advertise the
	 * HBLANK expansion support. Presuming that they perform HBLANK
	 * expansion internally, or are affected by this issue on modes with a
	 * short HBLANK for other reasons.
	 */
	if (!drm_dp_128b132b_supported(dpcd) &&
	    !(dpcd[DP_RECEIVE_PORT_0_CAP_0] & DP_HBLANK_EXPANSION_CAPABLE))
		return false;

	drm_dbg_kms(display->drm,
		    "[CONNECTOR:%d:%s] DSC HBLANK expansion quirk detected\n",
		    connector->base.base.id, connector->base.name);

	return true;
}

/* Topology manager callback: create a connector for a newly found MST port. */
static struct drm_connector *
mst_topology_add_connector(struct drm_dp_mst_topology_mgr *mgr,
			   struct drm_dp_mst_port *port,
			   const char *pathprop)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);
	struct intel_display *display = to_intel_display(intel_dp);
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct intel_connector *connector;
	enum pipe pipe;
	int ret;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	connector->get_hw_state = mst_connector_get_hw_state;
	connector->sync_state = intel_dp_connector_sync_state;
	connector->mst.dp = intel_dp;
	connector->mst.port = port;
	drm_dp_mst_get_port_malloc(port);

	ret = drm_connector_dynamic_init(display->drm, &connector->base, &mst_connector_funcs,
					 DRM_MODE_CONNECTOR_DisplayPort, NULL);
	if (ret)
		goto err_put_port;

	connector->dp.dsc_decompression_aux = drm_dp_mst_dsc_aux_for_port(port);
	intel_dp_mst_read_decompression_port_dsc_caps(intel_dp, connector);
	connector->dp.dsc_hblank_expansion_quirk =
		detect_dsc_hblank_expansion_quirk(connector);

	drm_connector_helper_add(&connector->base, &mst_connector_helper_funcs);

	/* Attach the stream encoder of every pipe; see pipe_mask note below. */
	for_each_pipe(display, pipe) {
		struct drm_encoder *enc =
			&intel_dp->mst.stream_encoders[pipe]->base.base;

		ret = drm_connector_attach_encoder(&connector->base, enc);
		if (ret)
			goto err_cleanup_connector;
	}

	ret = mst_topology_add_connector_properties(intel_dp, &connector->base, pathprop);
	if (ret)
		goto err_cleanup_connector;

	/* HDCP init failure is not fatal; the connector works without it. */
	ret = intel_dp_hdcp_init(dig_port, connector);
	if (ret)
		drm_dbg_kms(display->drm, "[%s:%d] HDCP MST init failed, skipping.\n",
			    connector->base.name, connector->base.base.id);

	return &connector->base;

err_cleanup_connector:
	drm_connector_cleanup(&connector->base);
err_put_port:
	drm_dp_mst_put_port_malloc(port);
	intel_connector_free(connector);

	return NULL;
}

/* Topology manager callback: re-check the port's HPD state. */
static void
mst_topology_poll_hpd_irq(struct drm_dp_mst_topology_mgr *mgr)
{
	struct intel_dp *intel_dp = container_of(mgr, struct intel_dp, mst.mgr);

	intel_hpd_trigger_irq(dp_to_dig_port(intel_dp));
}

static const struct drm_dp_mst_topology_cbs mst_topology_cbs = {
	.add_connector = mst_topology_add_connector,
	.poll_hpd_irq = mst_topology_poll_hpd_irq,
};

/* Create a fake encoder for an individual MST stream */
static struct intel_dp_mst_encoder *
mst_stream_encoder_create(struct intel_digital_port *dig_port, enum pipe pipe)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_encoder *primary_encoder = &dig_port->base;
	struct intel_dp_mst_encoder *intel_mst;
	struct intel_encoder *encoder;

	intel_mst = kzalloc(sizeof(*intel_mst), GFP_KERNEL);

	if (!intel_mst)
		return NULL;

	intel_mst->pipe = pipe;
	encoder = &intel_mst->base;
	intel_mst->primary = dig_port;

	drm_encoder_init(display->drm, &encoder->base, &mst_stream_encoder_funcs,
			 DRM_MODE_ENCODER_DPMST, "DP-MST %c", pipe_name(pipe));

	encoder->type = INTEL_OUTPUT_DP_MST;
	encoder->power_domain = primary_encoder->power_domain;
	encoder->port = primary_encoder->port;
	encoder->cloneable = 0;
	/*
	 * This is wrong, but broken userspace uses the intersection
	 * of possible_crtcs of all the encoders of a given connector
	 * to figure out which crtcs can drive said connector. What
	 * should be used instead is the union of possible_crtcs.
	 * To keep such userspace functioning we must misconfigure
	 * this to make sure the intersection is not empty :(
	 */
	encoder->pipe_mask = ~0;

	encoder->compute_config = mst_stream_compute_config;
	encoder->compute_config_late = mst_stream_compute_config_late;
	encoder->disable = mst_stream_disable;
	encoder->post_disable = mst_stream_post_disable;
	encoder->post_pll_disable = mst_stream_post_pll_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->pre_pll_enable = mst_stream_pre_pll_enable;
	encoder->pre_enable = mst_stream_pre_enable;
	encoder->enable = mst_stream_enable;
	encoder->audio_enable = intel_audio_codec_enable;
	encoder->audio_disable = intel_audio_codec_disable;
	encoder->get_hw_state = mst_stream_get_hw_state;
	encoder->get_config = mst_stream_get_config;
	encoder->initial_fastset_check = mst_stream_initial_fastset_check;

	return intel_mst;
}

/* Create the fake encoders for MST streams */
static bool
mst_stream_encoders_create(struct intel_digital_port *dig_port)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum pipe pipe;

	/* One stream encoder per pipe, all bound to the same primary port. */
	for_each_pipe(display, pipe)
		intel_dp->mst.stream_encoders[pipe] = mst_stream_encoder_create(dig_port, pipe);
	return true;
}

int
intel_dp_mst_encoder_init(struct intel_digital_port *dig_port, int conn_base_id)
{
	struct intel_display *display = to_intel_display(dig_port);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum port port = dig_port->base.port;
	int ret;

	if (!HAS_DP_MST(display) || intel_dp_is_edp(intel_dp))
		return 0;

	if (DISPLAY_VER(display) < 12 && port == PORT_A)
		return 0;

	if (DISPLAY_VER(display) < 11 && port == PORT_E)
		return 0;

	/*
	 * A non-NULL cbs pointer doubles as the "MST supported" flag, see
	 * intel_dp_mst_source_support().
	 */
	intel_dp->mst.mgr.cbs = &mst_topology_cbs;

	/* create encoders */
	mst_stream_encoders_create(dig_port);
	ret = drm_dp_mst_topology_mgr_init(&intel_dp->mst.mgr, display->drm,
					   &intel_dp->aux, 16,
					   INTEL_NUM_PIPES(display), conn_base_id);
	if (ret) {
		intel_dp->mst.mgr.cbs = NULL;
		return ret;
	}

	return 0;
}

bool intel_dp_mst_source_support(struct intel_dp *intel_dp)
{
	return intel_dp->mst.mgr.cbs;
}

void
intel_dp_mst_encoder_cleanup(struct intel_digital_port *dig_port)
{
	struct intel_dp *intel_dp = &dig_port->dp;

	if (!intel_dp_mst_source_support(intel_dp))
		return;

	drm_dp_mst_topology_mgr_destroy(&intel_dp->mst.mgr);
	/* encoders will get killed by normal cleanup */

	intel_dp->mst.mgr.cbs = NULL;
}

bool intel_dp_mst_is_master_trans(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->mst_master_transcoder == crtc_state->cpu_transcoder;
}

bool intel_dp_mst_is_slave_trans(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->mst_master_transcoder != INVALID_TRANSCODER &&
	       crtc_state->mst_master_transcoder != crtc_state->cpu_transcoder;
}

/**
 * intel_dp_mst_add_topology_state_for_connector - add MST topology state for a connector
 * @state: atomic state
 * @connector: connector to add the state for
 * @crtc: the CRTC @connector is attached to
 *
 * Add the MST topology state for @connector to @state.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int
intel_dp_mst_add_topology_state_for_connector(struct intel_atomic_state *state,
					      struct intel_connector *connector,
					      struct intel_crtc *crtc)
{
	struct drm_dp_mst_topology_state *mst_state;

	/* Not an MST connector, nothing to add. */
	if (!connector->mst.dp)
		return 0;

	mst_state = drm_atomic_get_mst_topology_state(&state->base,
						      &connector->mst.dp->mst.mgr);
	if (IS_ERR(mst_state))
		return PTR_ERR(mst_state);

	/* Mark @crtc as having a pending change in this topology. */
	mst_state->pending_crtc_mask |= drm_crtc_mask(&crtc->base);

	return 0;
}

/**
 * intel_dp_mst_add_topology_state_for_crtc - add MST topology state for a CRTC
 * @state: atomic state
 * @crtc: CRTC to add the state for
 *
 * Add the MST topology state for @crtc to @state.
 *
 * Returns 0 on success, negative error code on failure.
 */
int intel_dp_mst_add_topology_state_for_crtc(struct intel_atomic_state *state,
					     struct intel_crtc *crtc)
{
	struct drm_connector *_connector;
	struct drm_connector_state *conn_state;
	int i;

	/* Add the topology state of every connector driven by @crtc. */
	for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
		struct intel_connector *connector = to_intel_connector(_connector);
		int ret;

		if (conn_state->crtc != &crtc->base)
			continue;

		ret = intel_dp_mst_add_topology_state_for_connector(state, connector, crtc);
		if (ret)
			return ret;
	}

	return 0;
}

/*
 * Return the first connector in @state that is or was attached to @crtc,
 * or NULL if @crtc has no connector in @state.
 */
static struct intel_connector *
get_connector_in_state_for_crtc(struct intel_atomic_state *state,
				const struct intel_crtc *crtc)
{
	struct drm_connector_state *old_conn_state;
	struct drm_connector_state *new_conn_state;
	struct drm_connector *_connector;
	int i;

	for_each_oldnew_connector_in_state(&state->base, _connector,
					   old_conn_state, new_conn_state, i) {
		struct intel_connector *connector =
			to_intel_connector(_connector);

		if (old_conn_state->crtc == &crtc->base ||
		    new_conn_state->crtc == &crtc->base)
			return connector;
	}

	return NULL;
}

/**
 * intel_dp_mst_crtc_needs_modeset - check if changes in topology need to modeset the given CRTC
 * @state: atomic state
 * @crtc: CRTC for which to check the modeset requirement
 *
 * Check if any change in a MST topology requires a forced modeset on @crtc in
 * this topology. One such change is enabling/disabling the DSC decompression
 * state in the first branch device's UFP DPCD as required by one CRTC, while
 * the other @crtc in the same topology is still active, requiring a full modeset
 * on @crtc.
 */
bool intel_dp_mst_crtc_needs_modeset(struct intel_atomic_state *state,
				     struct intel_crtc *crtc)
{
	const struct intel_connector *crtc_connector;
	const struct drm_connector_state *conn_state;
	const struct drm_connector *_connector;
	int i;

	if (!intel_crtc_has_type(intel_atomic_get_new_crtc_state(state, crtc),
				 INTEL_OUTPUT_DP_MST))
		return false;

	crtc_connector = get_connector_in_state_for_crtc(state, crtc);

	if (!crtc_connector)
		/* None of the connectors in the topology needs modeset */
		return false;

	/* Scan all other connectors sharing @crtc's MST topology. */
	for_each_new_connector_in_state(&state->base, _connector, conn_state, i) {
		const struct intel_connector *connector =
			to_intel_connector(_connector);
		const struct intel_crtc_state *new_crtc_state;
		const struct intel_crtc_state *old_crtc_state;
		struct intel_crtc *crtc_iter;

		/* Only connectors in the same topology with an attached CRTC. */
		if (connector->mst.dp != crtc_connector->mst.dp ||
		    !conn_state->crtc)
			continue;

		crtc_iter = to_intel_crtc(conn_state->crtc);

		new_crtc_state = intel_atomic_get_new_crtc_state(state, crtc_iter);
		old_crtc_state = intel_atomic_get_old_crtc_state(state, crtc_iter);

		if (!intel_crtc_needs_modeset(new_crtc_state))
			continue;

		/* Only streams toggling their DSC decompression state matter. */
		if (old_crtc_state->dsc.compression_enable ==
		    new_crtc_state->dsc.compression_enable)
			continue;
		/*
		 * Toggling the decompression flag because of this stream in
		 * the first downstream branch device's UFP DPCD may reset the
		 * whole branch device. To avoid the reset while other streams
		 * are also active modeset the whole MST topology in this
		 * case.
		 */
		if (connector->dp.dsc_decompression_aux ==
		    &connector->mst.dp->aux)
			return true;
	}

	return false;
}

/**
 * intel_dp_mst_prepare_probe - Prepare an MST link for topology probing
 * @intel_dp: DP port object
 *
 * Prepare an MST link for topology probing, programming the target
 * link parameters to DPCD. This step is a requirement of the enumeration
 * of path resources during probing.
 */
void intel_dp_mst_prepare_probe(struct intel_dp *intel_dp)
{
	int link_rate = intel_dp_max_link_rate(intel_dp);
	int lane_count = intel_dp_max_lane_count(intel_dp);
	u8 rate_select;
	u8 link_bw;

	/* An active link already has its parameters programmed. */
	if (intel_dp->link.active)
		return;

	/* Skip the DPCD writes if these parameters were already probed. */
	if (intel_mst_probed_link_params_valid(intel_dp, link_rate, lane_count))
		return;

	intel_dp_compute_rate(intel_dp, link_rate, &link_bw, &rate_select);

	intel_dp_link_training_set_mode(intel_dp, link_rate, false);
	intel_dp_link_training_set_bw(intel_dp, link_bw, rate_select, lane_count,
				      drm_dp_enhanced_frame_cap(intel_dp->dpcd));

	intel_mst_set_probed_link_params(intel_dp, link_rate, lane_count);
}

/*
 * intel_dp_mst_verify_dpcd_state - verify the MST SW enabled state wrt. the DPCD
 * @intel_dp: DP port object
 *
 * Verify if @intel_dp's MST enabled SW state matches the corresponding DPCD
 * state. A long HPD pulse - not long enough to be detected as a disconnected
 * state - could've reset the DPCD state, which requires tearing
 * down/recreating the MST topology.
2127 * 2128 * Returns %true if the SW MST enabled and DPCD states match, %false 2129 * otherwise. 2130 */ 2131 bool intel_dp_mst_verify_dpcd_state(struct intel_dp *intel_dp) 2132 { 2133 struct intel_display *display = to_intel_display(intel_dp); 2134 struct intel_connector *connector = intel_dp->attached_connector; 2135 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp); 2136 struct intel_encoder *encoder = &dig_port->base; 2137 int ret; 2138 u8 val; 2139 2140 if (!intel_dp->is_mst) 2141 return true; 2142 2143 ret = drm_dp_dpcd_readb(intel_dp->mst.mgr.aux, DP_MSTM_CTRL, &val); 2144 2145 /* Adjust the expected register value for SST + SideBand. */ 2146 if (ret < 0 || val != (DP_MST_EN | DP_UP_REQ_EN | DP_UPSTREAM_IS_SRC)) { 2147 drm_dbg_kms(display->drm, 2148 "[CONNECTOR:%d:%s][ENCODER:%d:%s] MST mode got reset, removing topology (ret=%d, ctrl=0x%02x)\n", 2149 connector->base.base.id, connector->base.name, 2150 encoder->base.base.id, encoder->base.name, 2151 ret, val); 2152 2153 return false; 2154 } 2155 2156 return true; 2157 } 2158