// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include <drm/drm_print.h>

#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_vrr.h"
#include "intel_vrr_regs.h"

#define FIXED_POINT_PRECISION 100
#define CMRR_PRECISION_TOLERANCE 10

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	/*
	 * DP Sink is capable of VRR video timings if
	 * Ignore MSA bit is set in DPCD.
	 * EDID monitor range also should be at least 10 for reasonable
	 * Adaptive Sync or Variable Refresh Rate end user experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		if (connector->mst.dp)
			return false;
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(display) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline;
}

void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

static int intel_vrr_real_vblank_delay(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->hw.adjusted_mode.crtc_vblank_start -
		crtc_state->hw.adjusted_mode.crtc_vdisplay;
}

static int intel_vrr_extra_vblank_delay(struct intel_display *display)
{
	/*
	 * On ICL/TGL VRR hardware inserts one extra scanline
	 * just after vactive, which pushes the vmin decision
	 * boundary ahead accordingly. We'll include the extra
	 * scanline in our vblank delay estimates to make sure
	 * that we never underestimate how long we have until
	 * the delayed vblank has passed.
	 */
	return DISPLAY_VER(display) < 13 ? 1 : 0;
}

int intel_vrr_vblank_delay(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_real_vblank_delay(crtc_state) +
		intel_vrr_extra_vblank_delay(display);
}
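
/*
 * Example with purely illustrative numbers: a mode with crtc_vdisplay=1440
 * and crtc_vblank_start=1450 has a "real" vblank delay of 10 scanlines;
 * on ICL/TGL intel_vrr_vblank_delay() reports 11 because of the extra
 * scanline the hardware inserts just after vactive (see above).
 */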

static int intel_vrr_flipline_offset(struct intel_display *display)
{
	/* ICL/TGL hardware imposes flipline >= vmin + 1 */
	return DISPLAY_VER(display) < 13 ? 1 : 0;
}

static int intel_vrr_vmin_flipline(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return crtc_state->vrr.vmin + intel_vrr_flipline_offset(display);
}

/*
 * Without VRR registers get latched at:
 * vblank_start
 *
 * With VRR the earliest registers can get latched is:
 * intel_vrr_vmin_vblank_start(), which if we want to maintain
 * the correct min vtotal is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 * intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* hardware imposes one extra scanline somewhere */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	/* Min vblank actually determined by flipline */
	if (DISPLAY_VER(display) >= 13)
		return intel_vrr_vmin_flipline(crtc_state);
	else
		return intel_vrr_vmin_flipline(crtc_state) +
			intel_vrr_real_vblank_delay(crtc_state);
}

int intel_vrr_vmax_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.vmax;
	else
		return crtc_state->vrr.vmax +
			intel_vrr_real_vblank_delay(crtc_state);
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmin_vtotal(crtc_state) - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmax_vtotal(crtc_state) - intel_vrr_vblank_exit_length(crtc_state);
}
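
/*
 * Worked example with purely illustrative numbers (not taken from Bspec or
 * any particular panel): assume a ~60Hz mode with crtc_vdisplay=1440,
 * crtc_vblank_start=1450, crtc_vtotal=1481, crtc_htotal=2720 and a
 * 241500 kHz pixel clock, on a panel advertising a 48-60 Hz monitor range.
 *
 * vmin stays at crtc_vtotal (1481) and vmax becomes
 * 241500 * 1000 / (2720 * 48) ~= 1849 scanlines (see
 * intel_vrr_compute_vmin()/intel_vrr_compute_vmax() further down).
 *
 * On DISPLAY_VER() >= 13 the guardband computed in
 * intel_vrr_compute_config_late() ends up as vmin - crtc_vblank_start = 31
 * scanlines, so registers may get latched anywhere between
 * intel_vrr_vmin_vblank_start() = 1481 - 31 = 1450 and
 * intel_vrr_vmax_vblank_start() = 1849 - 31 = 1818, depending on when the
 * push arrives. On DISPLAY_VER() < 13 there is no guardband; the vblank
 * exit length is pipeline_full + framestart_delay + 1 instead, and the
 * vmin/vmax/flipline register values exclude the vblank delay, hence the
 * intel_vrr_real_vblank_delay() adjustments in the helpers above.
 */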

static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* Avoid CMRR for now till we have VRR with fixed timings working */
	if (!HAS_CMRR(display) || true)
		return false;

	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}

static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}

static
void intel_vrr_compute_cmrr_timings(struct intel_crtc_state *crtc_state)
{
	crtc_state->cmrr.enable = true;
	/*
	 * TODO: Compute precise target refresh rate to determine
	 * if video_mode_required should be true. Currently set to
	 * false due to uncertainty about the precise target
	 * refresh rate.
	 */
	crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
	crtc_state->vrr.vmin = crtc_state->vrr.vmax;
	crtc_state->vrr.flipline = crtc_state->vrr.vmin;
	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}

static
void intel_vrr_compute_vrr_timings(struct intel_crtc_state *crtc_state)
{
	crtc_state->vrr.enable = true;
	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}

/*
 * For fixed refresh rate mode, Vmin, Vmax and Flipline are all set to the
 * Vtotal value.
 */
static
int intel_vrr_fixed_rr_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int crtc_vtotal = crtc_state->hw.adjusted_mode.crtc_vtotal;

	if (DISPLAY_VER(display) >= 13)
		return crtc_vtotal;
	else
		return crtc_vtotal -
			intel_vrr_real_vblank_delay(crtc_state);
}

static
int intel_vrr_fixed_rr_vmax(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_vtotal(crtc_state);
}

static
int intel_vrr_fixed_rr_vmin(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_fixed_rr_vtotal(crtc_state) -
		intel_vrr_flipline_offset(display);
}

static
int intel_vrr_fixed_rr_flipline(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_vtotal(crtc_state);
}

void intel_vrr_set_fixed_rr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_fixed_rr_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_fixed_rr_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_fixed_rr_flipline(crtc_state) - 1);
}

static
void intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state *crtc_state)
{
	/*
	 * For fixed rr, vmin = vmax = flipline.
	 * vmin is already set to crtc_vtotal; set vmax and flipline to the
	 * same value.
	 */
	crtc_state->vrr.vmax = crtc_state->hw.adjusted_mode.crtc_vtotal;
	crtc_state->vrr.flipline = crtc_state->hw.adjusted_mode.crtc_vtotal;
}

static
int intel_vrr_compute_vmin(struct intel_crtc_state *crtc_state)
{
	/*
	 * To make fixed rr and vrr work seamlessly, the guardband/pipeline
	 * full should be set such that it satisfies both the fixed and
	 * variable timings.
	 * For this, set vmin to crtc_vtotal. With this we never need to
	 * change anything to do with the guardband.
	 */
	return crtc_state->hw.adjusted_mode.crtc_vtotal;
}

static
int intel_vrr_compute_vmax(struct intel_connector *connector,
			   const struct drm_display_mode *adjusted_mode)
{
	const struct drm_display_info *info = &connector->base.display_info;
	int vmax;

	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	return vmax;
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	int vmin, vmax;

	if (!HAS_VRR(display))
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));

	/*
	 * Allow fixed refresh rate with VRR Timing Generator.
	 * For now set vrr.in_range to 0, to allow fixed_rr but skip actual
	 * VRR and LRR.
	 * #TODO For actual VRR with joiner, we need to figure out how to
	 * correctly sequence transcoder level stuff vs. pipe level stuff
	 * in the commit.
	 */
	if (crtc_state->joiner_pipes)
		crtc_state->vrr.in_range = false;

	vmin = intel_vrr_compute_vmin(crtc_state);

	if (crtc_state->vrr.in_range) {
		if (HAS_LRR(display))
			crtc_state->update_lrr = true;
		vmax = intel_vrr_compute_vmax(connector, adjusted_mode);
	} else {
		vmax = vmin;
	}

	crtc_state->vrr.vmin = vmin;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin;

	if (crtc_state->uapi.vrr_enabled && vmin < vmax)
		intel_vrr_compute_vrr_timings(crtc_state);
	else if (is_cmrr_frac_required(crtc_state) && is_edp)
		intel_vrr_compute_cmrr_timings(crtc_state);
	else
		intel_vrr_compute_fixed_rr_timings(crtc_state);

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and on ICL/TGL flipline >= vmin + 1, hence we reduce
	 * vmin by one to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin -= intel_vrr_flipline_offset(display);

	if (HAS_AS_SDP(display)) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}
}

void intel_vrr_compute_config_late(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start;
	} else {
		/* hardware imposes one extra scanline somewhere */
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);

		/*
		 * vmin/vmax/flipline also need to be adjusted by
		 * the vblank delay to maintain correct vtotals.
		 */
		crtc_state->vrr.vmin -= intel_vrr_real_vblank_delay(crtc_state);
		crtc_state->vrr.vmax -= intel_vrr_real_vblank_delay(crtc_state);
		crtc_state->vrr.flipline -= intel_vrr_real_vblank_delay(crtc_state);
	}
}

static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 14)
		return VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	intel_vrr_set_fixed_rr_timings(crtc_state);

	if (!intel_vrr_always_use_vrr_tg(display) && !crtc_state->vrr.enable)
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));
}

void intel_vrr_send_push(struct intel_dsb *dsb,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	if (dsb)
		intel_dsb_nonpost_start(dsb);

	intel_de_write_dsb(display, dsb,
			   TRANS_PUSH(display, cpu_transcoder),
			   TRANS_PUSH_EN | TRANS_PUSH_SEND);

	if (dsb)
		intel_dsb_nonpost_end(dsb);
}

void intel_vrr_check_push_sent(struct intel_dsb *dsb,
			       const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	/*
	 * Make sure the push send bit has cleared. This should
	 * already be the case as long as the caller makes sure
	 * this is called after the delayed vblank has occurred.
	 */
	if (dsb) {
		int wait_us, count;

		wait_us = 2;
		count = 1;

		/*
		 * If the bit hasn't cleared the DSB will
		 * raise the poll error interrupt.
		 */
		intel_dsb_poll(dsb, TRANS_PUSH(display, cpu_transcoder),
			       TRANS_PUSH_SEND, 0, wait_us, count);
	} else {
		if (intel_vrr_is_push_sent(crtc_state))
			drm_err(display->drm, "[CRTC:%d:%s] VRR push send still pending\n",
				crtc->base.base.id, crtc->base.name);
	}
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}

bool intel_vrr_always_use_vrr_tg(struct intel_display *display)
{
	if (!HAS_VRR(display))
		return false;

	if (DISPLAY_VER(display) >= 30)
		return true;

	return false;
}

void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	if (!intel_vrr_always_use_vrr_tg(display)) {
		if (crtc_state->cmrr.enable) {
			intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
				       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
				       trans_vrr_ctl(crtc_state));
		} else {
			intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
				       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
		}
	}
}
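
/*
 * Rough sketch of how the two enable/disable paths split: on platforms
 * where intel_vrr_always_use_vrr_tg() is true the VRR timing generator is
 * turned on/off together with the transcoder via
 * intel_vrr_transcoder_enable()/intel_vrr_transcoder_disable(), and
 * intel_vrr_enable()/intel_vrr_disable() mainly reprogram
 * vmin/vmax/flipline (falling back to the fixed refresh rate timings on
 * disable). On older platforms they additionally toggle VRR_CTL_VRR_ENABLE
 * and the TRANS_PUSH enable around variable refresh operation.
 */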

void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	if (!intel_vrr_always_use_vrr_tg(display)) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(old_crtc_state));
		intel_de_wait_for_clear(display,
					TRANS_VRR_STATUS(display, cpu_transcoder),
					VRR_STATUS_VRR_EN_LIVE, 1000);
		intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
	}

	intel_vrr_set_fixed_rr_timings(old_crtc_state);
}

void intel_vrr_transcoder_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (!intel_vrr_always_use_vrr_tg(display)) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));
		return;
	}

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
}

void intel_vrr_transcoder_disable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), 0);

	intel_de_wait_for_clear(display, TRANS_VRR_STATUS(display, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
}

bool intel_vrr_is_fixed_rr(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline &&
	       crtc_state->vrr.flipline == crtc_state->vrr.vmax &&
	       crtc_state->vrr.flipline == intel_vrr_vmin_flipline(crtc_state);
}
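
/*
 * Hardware state readout counterpart of the compute/enable paths above.
 * Note that the hardware holds vmin/vmax/flipline as "value minus one",
 * so the readout below adds 1 back, mirroring the "- 1" used on the
 * write side.
 */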

void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;
	bool vrr_enable;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;

		/*
		 * For platforms that always use the VRR Timing Generator, the
		 * VTOTAL.Vtotal bits are not filled. Since for these platforms
		 * TRANS_VRR_VMIN is always filled with crtc_vtotal, use it to
		 * get the vtotal for adjusted_mode.
		 */
		if (intel_vrr_always_use_vrr_tg(display))
			crtc_state->hw.adjusted_mode.crtc_vtotal =
				intel_vrr_vmin_vtotal(crtc_state);

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}

	vrr_enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;

	if (intel_vrr_always_use_vrr_tg(display))
		crtc_state->vrr.enable = vrr_enable && !intel_vrr_is_fixed_rr(crtc_state);
	else
		crtc_state->vrr.enable = vrr_enable;

	/*
	 * #TODO: For both VRR and CMRR the flag I915_MODE_FLAG_VRR is set for
	 * mode_flags. Since CMRR is currently disabled, set this flag for VRR
	 * for now. Need to keep this in mind while re-enabling CMRR.
	 */
	if (crtc_state->vrr.enable)
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}