xref: /linux/drivers/gpu/drm/i915/display/intel_vrr.c (revision ca220141fa8ebae09765a242076b2b77338106b0)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  *
5  */
6 
7 #include <drm/drm_print.h>
8 
9 #include "intel_crtc.h"
10 #include "intel_de.h"
11 #include "intel_display_regs.h"
12 #include "intel_display_types.h"
13 #include "intel_dmc.h"
14 #include "intel_dmc_regs.h"
15 #include "intel_dp.h"
16 #include "intel_psr.h"
17 #include "intel_vrr.h"
18 #include "intel_vrr_regs.h"
19 #include "skl_prefill.h"
20 #include "skl_watermark.h"
21 
22 #define FIXED_POINT_PRECISION		100
23 #define CMRR_PRECISION_TOLERANCE	10
24 
/*
 * Tunable parameters for DC Balance correction.
 * These values were determined empirically through experimentation.
 */
29 #define DCB_CORRECTION_SENSITIVITY	30
30 #define DCB_CORRECTION_AGGRESSIVENESS	1000 /* ms × 100; 10 ms */
31 #define DCB_BLANK_TARGET		50
32 
/*
 * intel_vrr_is_capable - check whether a connector can do VRR at all
 *
 * Requires platform VRR support, a DP/eDP sink (MST excluded), the sink's
 * "ignore MSA" DPCD capability, and a usable EDID refresh-rate range.
 */
bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	if (!HAS_VRR(display))
		return false;

	/*
	 * A DP sink is capable of VRR video timings if
	 * the Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also span at least 10 Hz for a
	 * reasonable Adaptive Sync / Variable Refresh Rate end user experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		/* eDP additionally requires the VBT to opt in */
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		/* no VRR on MST streams */
		if (connector->mst.dp)
			return false;
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}
68 
69 bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
70 {
71 	const struct drm_display_info *info = &connector->base.display_info;
72 
73 	return intel_vrr_is_capable(connector) &&
74 		vrefresh >= info->monitor_range.min_vfreq &&
75 		vrefresh <= info->monitor_range.max_vfreq;
76 }
77 
78 bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
79 {
80 	return crtc_state->vrr.flipline;
81 }
82 
83 void
84 intel_vrr_check_modeset(struct intel_atomic_state *state)
85 {
86 	int i;
87 	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
88 	struct intel_crtc *crtc;
89 
90 	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
91 					    new_crtc_state, i) {
92 		if (new_crtc_state->uapi.vrr_enabled !=
93 		    old_crtc_state->uapi.vrr_enabled)
94 			new_crtc_state->uapi.mode_changed = true;
95 	}
96 }
97 
static int intel_vrr_extra_vblank_delay(struct intel_display *display)
{
	/*
	 * On ICL/TGL VRR hardware inserts one extra scanline
	 * just after vactive, which pushes the vmin decision
	 * boundary ahead accordingly, and thus reduces the
	 * max guardband length by one scanline.
	 */
	if (DISPLAY_VER(display) >= 13)
		return 0;

	return 1;
}
108 
static int intel_vrr_vmin_flipline_offset(struct intel_display *display)
{
	/*
	 * ICL/TGL hardware imposes flipline>=vmin+1
	 *
	 * We reduce the vmin value to compensate when programming the
	 * hardware. This approach allows flipline to remain set at the
	 * original value, and thus the frame will have the desired
	 * minimum vtotal.
	 */
	if (DISPLAY_VER(display) >= 13)
		return 0;

	return 1;
}
121 
122 static int intel_vrr_guardband_to_pipeline_full(const struct intel_crtc_state *crtc_state,
123 						int guardband)
124 {
125 	/* hardware imposes one extra scanline somewhere */
126 	return guardband - crtc_state->framestart_delay - 1;
127 }
128 
129 static int intel_vrr_pipeline_full_to_guardband(const struct intel_crtc_state *crtc_state,
130 						int pipeline_full)
131 {
132 	/* hardware imposes one extra scanline somewhere */
133 	return pipeline_full + crtc_state->framestart_delay + 1;
134 }
135 
136 /*
137  * Without VRR registers get latched at:
138  *  vblank_start
139  *
140  * With VRR the earliest registers can get latched is:
141  *  intel_vrr_vmin_vblank_start(), which if we want to maintain
142  *  the correct min vtotal is >=vblank_start+1
143  *
144  * The latest point registers can get latched is the vmax decision boundary:
145  *  intel_vrr_vmax_vblank_start()
146  *
147  * Between those two points the vblank exit starts (and hence registers get
148  * latched) ASAP after a push is sent.
149  *
150  * framestart_delay is programmable 1-4.
151  */
152 
/* Smallest vtotal (fastest frame) the VRR timing generator will produce. */
int intel_vrr_vmin_vtotal(const struct intel_crtc_state *crtc_state)
{
	/* Min vblank actually determined by flipline */
	return crtc_state->vrr.vmin;
}
158 
/* Largest vtotal (slowest frame) the VRR timing generator will produce. */
int intel_vrr_vmax_vtotal(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax;
}
163 
/* Earliest scanline at which registers can get latched (vmin case). */
int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmin_vtotal(crtc_state) - crtc_state->vrr.guardband;
}
168 
/* Latest scanline at which registers can get latched (vmax decision boundary). */
int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmax_vtotal(crtc_state) - crtc_state->vrr.guardband;
}
173 
/*
 * is_cmrr_frac_required - check whether a fractional (CMRR) refresh rate
 * is needed to hit the requested vrefresh within tolerance.
 *
 * Compares the requested refresh rate against what integer vtotal timings
 * can achieve, both scaled by FIXED_POINT_PRECISION.
 */
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/*
	 * Avoid CMRR for now till we have VRR with fixed timings working.
	 * The "|| true" deliberately disables CMRR unconditionally.
	 */
	if (!HAS_CMRR(display) || true)
		return false;

	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	/* close enough to an integer rate -> no fractional CMRR needed */
	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}
197 
/*
 * cmrr_get_vtotal - compute the CMRR vtotal and the M/N dividers.
 *
 * When @video_mode_required the 1001/1000 NTSC-style multiplier is applied.
 * Side effects: fills crtc_state->cmrr.cmrr_n and cmrr.cmrr_m.
 * Note: do_div() divides adjusted_pixel_rate in place and returns the
 * remainder, which is what gets stored as cmrr_m.
 */
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}
221 
/*
 * intel_vrr_compute_cmrr_timings - set up fixed-rate CMRR timings
 * (vmin == vmax == flipline) and enable CMRR in the crtc state.
 */
static
void intel_vrr_compute_cmrr_timings(struct intel_crtc_state *crtc_state)
{
	/*
	 * TODO: Compute precise target refresh rate to determine
	 * if video_mode_required should be true. Currently set to
	 * false due to uncertainty about the precise target
	 * refresh rate.
	 */
	crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
	crtc_state->vrr.vmin = crtc_state->vrr.vmax;
	crtc_state->vrr.flipline = crtc_state->vrr.vmin;

	crtc_state->cmrr.enable = true;
	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}
238 
/*
 * intel_vrr_compute_vrr_timings - set up variable refresh timings with
 * flipline pinned to vmin, and enable VRR in the crtc state.
 */
static
void intel_vrr_compute_vrr_timings(struct intel_crtc_state *crtc_state,
				   int vmin, int vmax)
{
	crtc_state->vrr.vmax = vmax;
	crtc_state->vrr.vmin = vmin;
	crtc_state->vrr.flipline = crtc_state->vrr.vmin;

	crtc_state->vrr.enable = true;
	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}
250 
/*
 * intel_vrr_compute_fixed_rr_timings - fixed refresh rate via the VRR
 * timing generator: vmin == vmax == flipline == crtc_vtotal.
 */
static
void intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state *crtc_state)
{
	/* For fixed rr, vmin = vmax = flipline */
	crtc_state->vrr.vmax = crtc_state->hw.adjusted_mode.crtc_vtotal;
	crtc_state->vrr.vmin = crtc_state->vrr.vmax;
	crtc_state->vrr.flipline = crtc_state->vrr.vmin;
}
259 
260 static int intel_vrr_hw_value(const struct intel_crtc_state *crtc_state,
261 			      int value)
262 {
263 	struct intel_display *display = to_intel_display(crtc_state);
264 
265 	/*
266 	 * On TGL vmin/vmax/flipline also need to be
267 	 * adjusted by the SCL to maintain correct vtotals.
268 	 */
269 	if (DISPLAY_VER(display) >= 13)
270 		return value;
271 	else
272 		return value - crtc_state->set_context_latency;
273 }
274 
/* vblank start scanline for a given hw-adjusted vmin/vmax value. */
static int intel_vrr_vblank_start(const struct intel_crtc_state *crtc_state,
				  int vmin_vmax)
{
	return intel_vrr_hw_value(crtc_state, vmin_vmax) - crtc_state->vrr.guardband;
}
280 
/*
 * For fixed refresh rate mode Vmin, Vmax and Flipline all are set to
 * the Vtotal value.
 */
static
int intel_vrr_fixed_rr_hw_vtotal(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_hw_value(crtc_state, crtc_state->hw.adjusted_mode.crtc_vtotal);
}
290 
/* Fixed refresh rate: hw vmax equals the hw vtotal. */
static
int intel_vrr_fixed_rr_hw_vmax(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state);
}
296 
/*
 * Fixed refresh rate: hw vmin is the hw vtotal, lowered on ICL/TGL to
 * compensate for the hardware's flipline >= vmin+1 restriction.
 */
static
int intel_vrr_fixed_rr_hw_vmin(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_fixed_rr_hw_vtotal(crtc_state) -
		intel_vrr_vmin_flipline_offset(display);
}
305 
/* Fixed refresh rate: hw flipline equals the hw vtotal. */
static
int intel_vrr_fixed_rr_hw_flipline(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state);
}
311 
/*
 * intel_vrr_set_fixed_rr_timings - program the VMIN/VMAX/FLIPLINE
 * registers for fixed refresh rate operation.
 *
 * The registers are programmed with value-1 (hardware is 0 based).
 */
void intel_vrr_set_fixed_rr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_flipline(crtc_state) - 1);
}
327 
/* Compute the logical vmin; see comment below for why crtc_vtotal is used. */
static
int intel_vrr_compute_vmin(struct intel_crtc_state *crtc_state)
{
	/*
	 * To make fixed rr and vrr work seamless the guardband/pipeline full
	 * should be set such that it satisfies both the fixed and variable
	 * timings.
	 * For this set the vmin as crtc_vtotal. With this we never need to
	 * change anything to do with the guardband.
	 */
	return crtc_state->hw.adjusted_mode.crtc_vtotal;
}
340 
/*
 * Compute the logical vmax: the vtotal corresponding to the sink's minimum
 * refresh rate at the current pixel clock/htotal, clamped to be no smaller
 * than the mode's own vtotal.
 */
static
int intel_vrr_compute_vmax(struct intel_connector *connector,
			   const struct drm_display_mode *adjusted_mode)
{
	const struct drm_display_info *info = &connector->base.display_info;
	int vmax;

	/* crtc_clock is in kHz, hence the *1000 to get Hz */
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	return vmax;
}
354 
355 static bool intel_vrr_dc_balance_possible(const struct intel_crtc_state *crtc_state)
356 {
357 	struct intel_display *display = to_intel_display(crtc_state);
358 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
359 	enum pipe pipe = crtc->pipe;
360 
361 	/*
362 	 * FIXME: Currently Firmware supports DC Balancing on PIPE A
363 	 * and PIPE B. Account those limitation while computing DC
364 	 * Balance parameters.
365 	 */
366 	return (HAS_VRR_DC_BALANCE(display) &&
367 		((pipe == PIPE_A) || (pipe == PIPE_B)));
368 }
369 
/*
 * intel_vrr_dc_balance_compute_config - derive the DC Balance parameters
 * (vmin/vmax window, correction limits, guardband, slope, vblank target)
 * from the VRR range and the DCB_* tunables, and enable DC balancing in
 * the crtc state when supported.
 */
static void
intel_vrr_dc_balance_compute_config(struct intel_crtc_state *crtc_state)
{
	int guardband_usec, adjustment_usec;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_dc_balance_possible(crtc_state) || !crtc_state->vrr.enable)
		return;

	crtc_state->vrr.dc_balance.vmax = crtc_state->vrr.vmax;
	crtc_state->vrr.dc_balance.vmin = crtc_state->vrr.vmin;
	/* firmware may push vtotal up/down by at most the full VRR range */
	crtc_state->vrr.dc_balance.max_increase =
		crtc_state->vrr.vmax - crtc_state->vrr.vmin;
	crtc_state->vrr.dc_balance.max_decrease =
		crtc_state->vrr.vmax - crtc_state->vrr.vmin;
	/* guardband = DCB_CORRECTION_SENSITIVITY percent of vmax, in scanlines */
	crtc_state->vrr.dc_balance.guardband =
		DIV_ROUND_UP(crtc_state->vrr.dc_balance.vmax *
			     DCB_CORRECTION_SENSITIVITY, 100);
	guardband_usec =
		intel_scanlines_to_usecs(adjusted_mode,
					 crtc_state->vrr.dc_balance.guardband);
	/*
	 *  The correction_aggressiveness/100 is the number of milliseconds to
	 *  adjust by when the balance is at twice the guardband.
	 *  guardband_slope = correction_aggressiveness / (guardband * 100)
	 */
	adjustment_usec = DCB_CORRECTION_AGGRESSIVENESS * 10;
	crtc_state->vrr.dc_balance.slope =
		DIV_ROUND_UP(adjustment_usec, guardband_usec);
	/* target vblank length = DCB_BLANK_TARGET percent of the VRR range */
	crtc_state->vrr.dc_balance.vblank_target =
		DIV_ROUND_UP((crtc_state->vrr.vmax - crtc_state->vrr.vmin) *
			     DCB_BLANK_TARGET, 100);
	crtc_state->vrr.dc_balance.enable = true;
}
404 
/*
 * intel_vrr_compute_config - compute the VRR state for a crtc.
 *
 * Decides between real VRR, CMRR, or fixed refresh rate timings based on
 * sink capability, the uapi vrr_enabled property, and platform limits,
 * then derives the AS SDP vsync offsets and DC Balance parameters.
 */
void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	int vmin, vmax;

	if (!HAS_VRR(display))
		return;

	/* no VRR with interlaced modes */
	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));

	/*
	 * Allow fixed refresh rate with VRR Timing Generator.
	 * For now set the vrr.in_range to 0, to allow fixed_rr but skip actual
	 * VRR and LRR.
	 * #TODO For actual VRR with joiner, we need to figure out how to
	 * correctly sequence transcoder level stuff vs. pipe level stuff
	 * in the commit.
	 */
	if (crtc_state->joiner_pipes)
		crtc_state->vrr.in_range = false;

	vmin = intel_vrr_compute_vmin(crtc_state);

	if (crtc_state->vrr.in_range) {
		if (HAS_LRR(display))
			crtc_state->update_lrr = true;
		vmax = intel_vrr_compute_vmax(connector, adjusted_mode);
	} else {
		/* out of range: collapse the window to fixed rate */
		vmax = vmin;
	}

	if (crtc_state->uapi.vrr_enabled && vmin < vmax)
		intel_vrr_compute_vrr_timings(crtc_state, vmin, vmax);
	else if (is_cmrr_frac_required(crtc_state) && is_edp)
		intel_vrr_compute_cmrr_timings(crtc_state);
	else
		intel_vrr_compute_fixed_rr_timings(crtc_state);

	/*
	 * AS SDP vsync positions are stored as distances from vtotal,
	 * as expected by the TRANS_VRR_VSYNC register.
	 */
	if (HAS_AS_SDP(display)) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.crtc_vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.crtc_vsync_end);
	}

	intel_vrr_dc_balance_compute_config(crtc_state);
}
465 
466 static int
467 intel_vrr_max_hw_guardband(const struct intel_crtc_state *crtc_state)
468 {
469 	struct intel_display *display = to_intel_display(crtc_state);
470 	int max_pipeline_full = REG_FIELD_MAX(VRR_CTL_PIPELINE_FULL_MASK);
471 
472 	if (DISPLAY_VER(display) >= 13)
473 		return REG_FIELD_MAX(XELPD_VRR_CTL_VRR_GUARDBAND_MASK);
474 	else
475 		return intel_vrr_pipeline_full_to_guardband(crtc_state,
476 							    max_pipeline_full);
477 }
478 
/*
 * Maximum guardband that still fits inside the vmin vblank, accounting
 * for set context latency and the ICL/TGL extra vblank delay scanline.
 */
static int
intel_vrr_max_vblank_guardband(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	return crtc_state->vrr.vmin -
	       adjusted_mode->crtc_vdisplay -
	       crtc_state->set_context_latency -
	       intel_vrr_extra_vblank_delay(display);
}
490 
/* Overall guardband limit: the tighter of the hw and vblank limits. */
static int
intel_vrr_max_guardband(struct intel_crtc_state *crtc_state)
{
	int hw_limit = intel_vrr_max_hw_guardband(crtc_state);
	int vblank_limit = intel_vrr_max_vblank_guardband(crtc_state);

	return min(hw_limit, vblank_limit);
}
497 
/*
 * intel_vrr_compute_optimized_guardband - smallest guardband that still
 * covers prefill (incl. PM exit latency) and, for DP, PSR and SDP
 * transmission requirements.
 */
static
int intel_vrr_compute_optimized_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct skl_prefill_ctx prefill_ctx;
	int prefill_latency_us;
	int guardband = 0;

	skl_prefill_init_worst(&prefill_ctx, crtc_state);

	/*
	 * The SoC power controller runs SAGV mutually exclusive with package C states,
	 * so the max of package C and SAGV latencies is used to compute the min prefill guardband.
	 * PM delay = max(sagv_latency, pkgc_max_latency (highest enabled wm level 1 and up))
	 */
	prefill_latency_us = max(display->sagv.block_time_us,
				 skl_watermark_max_latency(display, 1));

	guardband = skl_prefill_min_guardband(&prefill_ctx,
					      crtc_state,
					      prefill_latency_us);

	/* DP additionally needs room for PSR exit and SDP transmission */
	if (intel_crtc_has_dp_encoder(crtc_state)) {
		guardband = max(guardband, intel_psr_min_guardband(crtc_state));
		guardband = max(guardband, intel_dp_sdp_min_guardband(crtc_state, true));
	}

	return guardband;
}
527 
528 static bool intel_vrr_use_optimized_guardband(const struct intel_crtc_state *crtc_state)
529 {
530 	/*
531 	 * #TODO: Enable optimized guardband for HDMI
532 	 * For HDMI lot of infoframes are transmitted a line or two after vsync.
533 	 * Since with optimized guardband the double bufferring point is at delayed vblank,
534 	 * we need to ensure that vsync happens after delayed vblank for the HDMI case.
535 	 */
536 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
537 		return false;
538 
539 	return true;
540 }
541 
/*
 * intel_vrr_compute_guardband - choose the VRR guardband, clamp it to the
 * hw/vblank limits, keep adjusted_mode/pipe_mode vblank_start in sync when
 * the VRR TG is always used, and derive pipeline_full on pre-XELPD.
 */
void intel_vrr_compute_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	struct drm_display_mode *pipe_mode = &crtc_state->hw.pipe_mode;
	int guardband;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (intel_vrr_use_optimized_guardband(crtc_state))
		guardband = intel_vrr_compute_optimized_guardband(crtc_state);
	else
		/* full vblank length (vmin == crtc_vtotal here) */
		guardband = crtc_state->vrr.vmin - adjusted_mode->crtc_vdisplay;

	crtc_state->vrr.guardband = min(guardband, intel_vrr_max_guardband(crtc_state));

	if (intel_vrr_always_use_vrr_tg(display)) {
		adjusted_mode->crtc_vblank_start  =
			adjusted_mode->crtc_vtotal - crtc_state->vrr.guardband;
		/*
		 * pipe_mode has already been derived from the
		 * original adjusted_mode, keep the two in sync.
		 */
		pipe_mode->crtc_vblank_start =
			adjusted_mode->crtc_vblank_start;
	}

	/* pre-XELPD programs pipeline_full instead of guardband directly */
	if (DISPLAY_VER(display) < 13)
		crtc_state->vrr.pipeline_full =
			intel_vrr_guardband_to_pipeline_full(crtc_state,
							     crtc_state->vrr.guardband);
}
575 
/*
 * trans_vrr_ctl - build the TRANS_VRR_CTL value (minus the enable bits)
 * appropriate for the platform generation.
 */
static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 14)
		return VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}
591 
/*
 * intel_vrr_set_transcoder_timings - program the transcoder-level VRR
 * registers (CMRR M/N, fixed-rr timings, VRR_CTL, AS SDP vsync, EMP TL)
 * during modeset enable.
 */
void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	/* no VRR timings computed: make sure the TG is fully off */
	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	intel_vrr_set_fixed_rr_timings(crtc_state);

	if (!intel_vrr_always_use_vrr_tg(display))
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	/*
	 * For BMG and LNL+ onwards the EMP_AS_SDP_TL is used for programming
	 * double buffering point and transmission line for VRR packets for
	 * HDMI2.1/DP/eDP/DP->HDMI2.1 PCON.
	 * Since currently we support VRR only for DP/eDP, so this is programmed
	 * to for Adaptive Sync SDP to Vsync start.
	 */
	if (DISPLAY_VERx100(display) == 1401 || DISPLAY_VER(display) >= 20)
		intel_de_write(display,
			       EMP_AS_SDP_TL(display, cpu_transcoder),
			       EMP_AS_SDP_DB_TL(crtc_state->vrr.vsync_start));
}
650 
/*
 * intel_vrr_dcb_increment_flip_count - bump the per-crtc flip counter
 * and report it to the pipe DMC for DC Balance tracking.
 */
void
intel_vrr_dcb_increment_flip_count(struct intel_crtc_state *crtc_state,
				   struct intel_crtc *crtc)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum pipe pipe = crtc->pipe;

	if (!crtc_state->vrr.dc_balance.enable)
		return;

	intel_de_write(display, PIPEDMC_DCB_FLIP_COUNT(pipe),
		       ++crtc->dc_balance.flip_count);
}
664 
/*
 * intel_vrr_dcb_reset - clear the pipe DMC DC Balance flip counter and
 * balance state when tearing down a DC-balanced configuration.
 */
void
intel_vrr_dcb_reset(const struct intel_crtc_state *old_crtc_state,
		    struct intel_crtc *crtc)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum pipe pipe = crtc->pipe;

	if (!old_crtc_state->vrr.dc_balance.enable)
		return;

	intel_de_write(display, PIPEDMC_DCB_FLIP_COUNT(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_BALANCE_RESET(pipe), 0);
}
678 
/*
 * intel_vrr_send_push - send the VRR push to terminate the current frame.
 *
 * When issued via a DSB, the write is wrapped in a non-posted section so
 * it is not reordered/buffered.
 */
void intel_vrr_send_push(struct intel_dsb *dsb,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	if (dsb)
		intel_dsb_nonpost_start(dsb);

	intel_de_write_dsb(display, dsb,
			   TRANS_PUSH(display, cpu_transcoder),
			   TRANS_PUSH_EN | TRANS_PUSH_SEND);

	if (dsb)
		intel_dsb_nonpost_end(dsb);
}
698 
/*
 * intel_vrr_check_push_sent - verify the previously sent push has been
 * consumed by the hardware (TRANS_PUSH_SEND cleared).
 *
 * In the DSB path this is a hardware poll that raises the DSB poll error
 * interrupt on failure; in the MMIO path it logs an error.
 */
void intel_vrr_check_push_sent(struct intel_dsb *dsb,
			       const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	/*
	 * Make sure the push send bit has cleared. This should
	 * already be the case as long as the caller makes sure
	 * this is called after the delayed vblank has occurred.
	 */
	if (dsb) {
		int wait_us, count;

		/* expected to already be clear, so only a token wait */
		wait_us = 2;
		count = 1;

		/*
		 * If the bit hasn't cleared the DSB will
		 * raise the poll error interrupt.
		 */
		intel_dsb_poll(dsb, TRANS_PUSH(display, cpu_transcoder),
			       TRANS_PUSH_SEND, 0, wait_us, count);
	} else {
		if (intel_vrr_is_push_sent(crtc_state))
			drm_err(display->drm, "[CRTC:%d:%s] VRR push send still pending\n",
				crtc->base.base.id, crtc->base.name);
	}
}
732 
/*
 * intel_vrr_is_push_sent - whether a VRR push is still pending in the
 * hardware (TRANS_PUSH_SEND not yet consumed).
 */
bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}
743 
744 bool intel_vrr_always_use_vrr_tg(struct intel_display *display)
745 {
746 	if (!HAS_VRR(display))
747 		return false;
748 
749 	if (DISPLAY_VER(display) >= 30)
750 		return true;
751 
752 	return false;
753 }
754 
/* hw vmin: SCL-adjusted and lowered for the ICL/TGL flipline restriction. */
static int intel_vrr_hw_vmin(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_hw_value(crtc_state, crtc_state->vrr.vmin) -
		intel_vrr_vmin_flipline_offset(display);
}
762 
/* hw vmax: SCL-adjusted logical vmax. */
static int intel_vrr_hw_vmax(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_hw_value(crtc_state, crtc_state->vrr.vmax);
}
767 
/* hw flipline: SCL-adjusted logical flipline. */
static int intel_vrr_hw_flipline(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_hw_value(crtc_state, crtc_state->vrr.flipline);
}
772 
/*
 * intel_vrr_set_vrr_timings - program VMIN/VMAX/FLIPLINE for variable
 * refresh operation (registers are 0 based, hence the -1).
 */
static void intel_vrr_set_vrr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_hw_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_hw_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_hw_flipline(crtc_state) - 1);
}
785 
/*
 * intel_vrr_enable_dc_balancing - program the transcoder DCB registers,
 * hand the computed parameters to the pipe DMC, and arm the DC Balance
 * adjustment in TRANS_VRR_CTL.
 */
static void
intel_vrr_enable_dc_balancing(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum pipe pipe = crtc->pipe;
	u32 vrr_ctl = intel_de_read(display, TRANS_VRR_CTL(display, cpu_transcoder));

	if (!crtc_state->vrr.dc_balance.enable)
		return;

	/* seed both the config and live copies with the current vmax/flipline */
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG(cpu_transcoder),
		       VRR_DCB_ADJ_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder),
		       VRR_DCB_ADJ_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_VMAX(cpu_transcoder),
		       VRR_DCB_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder),
		       VRR_DCB_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE(cpu_transcoder),
		       VRR_DCB_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder),
		       VRR_DCB_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder),
		       VRR_DCB_ADJ_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG(cpu_transcoder),
		       VRR_DCB_ADJ_FLIPLINE(crtc_state->vrr.flipline - 1));
	/* pipe DMC firmware parameters computed in dc_balance_compute_config() */
	intel_de_write(display, PIPEDMC_DCB_VMIN(pipe),
		       crtc_state->vrr.dc_balance.vmin - 1);
	intel_de_write(display, PIPEDMC_DCB_VMAX(pipe),
		       crtc_state->vrr.dc_balance.vmax - 1);
	intel_de_write(display, PIPEDMC_DCB_MAX_INCREASE(pipe),
		       crtc_state->vrr.dc_balance.max_increase);
	intel_de_write(display, PIPEDMC_DCB_MAX_DECREASE(pipe),
		       crtc_state->vrr.dc_balance.max_decrease);
	intel_de_write(display, PIPEDMC_DCB_GUARDBAND(pipe),
		       crtc_state->vrr.dc_balance.guardband);
	intel_de_write(display, PIPEDMC_DCB_SLOPE(pipe),
		       crtc_state->vrr.dc_balance.slope);
	intel_de_write(display, PIPEDMC_DCB_VBLANK(pipe),
		       crtc_state->vrr.dc_balance.vblank_target);
	intel_dmc_configure_dc_balance_event(display, pipe, true);
	intel_de_write(display, TRANS_ADAPTIVE_SYNC_DCB_CTL(cpu_transcoder),
		       ADAPTIVE_SYNC_COUNTER_EN);
	intel_pipedmc_dcb_enable(NULL, crtc);

	vrr_ctl |= VRR_CTL_DCB_ADJ_ENABLE;
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
836 
/*
 * intel_vrr_disable_dc_balancing - tear down DC Balance in the reverse
 * order of enable: disarm the DMC first, then zero the pipe DMC and
 * transcoder DCB registers, and finally drop the VRR_CTL enable bit.
 */
static void
intel_vrr_disable_dc_balancing(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	enum pipe pipe = crtc->pipe;
	u32 vrr_ctl = intel_de_read(display, TRANS_VRR_CTL(display, cpu_transcoder));

	if (!old_crtc_state->vrr.dc_balance.enable)
		return;

	intel_pipedmc_dcb_disable(NULL, crtc);
	intel_dmc_configure_dc_balance_event(display, pipe, false);
	intel_de_write(display, TRANS_ADAPTIVE_SYNC_DCB_CTL(cpu_transcoder), 0);
	intel_de_write(display, PIPEDMC_DCB_VMIN(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_VMAX(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_MAX_INCREASE(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_MAX_DECREASE(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_GUARDBAND(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_SLOPE(pipe), 0);
	intel_de_write(display, PIPEDMC_DCB_VBLANK(pipe), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_VMAX(cpu_transcoder), 0);
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE(cpu_transcoder), 0);

	vrr_ctl &= ~VRR_CTL_DCB_ADJ_ENABLE;
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
871 
/*
 * intel_vrr_tg_enable - arm the push mechanism and turn on the VRR
 * timing generator (optionally in CMRR mode).
 */
static void intel_vrr_tg_enable(const struct intel_crtc_state *crtc_state,
				bool cmrr_enable)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 vrr_ctl;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), TRANS_PUSH_EN);

	vrr_ctl = VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state);

	/*
	 * FIXME this might be broken as bspec seems to imply that
	 * even VRR_CTL_CMRR_ENABLE is armed by TRANS_CMRR_N_HI
	 * when enabling CMRR (but not when disabling CMRR?).
	 */
	if (cmrr_enable)
		vrr_ctl |= VRR_CTL_CMRR_ENABLE;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
893 
/* Stop the VRR timing generator and wait for the hardware to confirm. */
static void intel_vrr_tg_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	/*
	 * Rewrite the control register without VRR_CTL_VRR_ENABLE
	 * (cf. intel_vrr_tg_enable()), keeping the other config bits.
	 */
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));

	/* Wait (up to 1 s) for the live enable status to drop. */
	if (intel_de_wait_for_clear_ms(display,
				       TRANS_VRR_STATUS(display, cpu_transcoder),
				       VRR_STATUS_VRR_EN_LIVE, 1000))
		drm_err(display->drm, "Timed out waiting for VRR live status to clear\n");

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
}
909 
/* Enable VRR for @crtc_state, if the state has VRR enabled. */
void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (!crtc_state->vrr.enable)
		return;

	intel_vrr_set_vrr_timings(crtc_state);
	intel_vrr_enable_dc_balancing(crtc_state);

	/*
	 * On platforms that always use the VRR timing generator, the TG is
	 * started from intel_vrr_transcoder_enable() instead.
	 */
	if (!intel_vrr_always_use_vrr_tg(display))
		intel_vrr_tg_enable(crtc_state, crtc_state->cmrr.enable);
}
923 
/* Disable VRR; undoes intel_vrr_enable() in reverse order. */
void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);

	if (!old_crtc_state->vrr.enable)
		return;

	/*
	 * On platforms that always use the VRR timing generator, the TG is
	 * stopped from intel_vrr_transcoder_disable() instead.
	 */
	if (!intel_vrr_always_use_vrr_tg(display))
		intel_vrr_tg_disable(old_crtc_state);

	intel_vrr_disable_dc_balancing(old_crtc_state);
	intel_vrr_set_fixed_rr_timings(old_crtc_state);
}
937 
/*
 * Start the VRR timing generator at transcoder enable time, on platforms
 * that always run the VRR TG (even in fixed refresh rate mode - hence
 * CMRR is not enabled here).
 */
void intel_vrr_transcoder_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (!intel_vrr_possible(crtc_state))
		return;

	if (intel_vrr_always_use_vrr_tg(display))
		intel_vrr_tg_enable(crtc_state, false);
}
948 
/*
 * Stop the VRR timing generator at transcoder disable time, on platforms
 * that always run the VRR TG. Counterpart of intel_vrr_transcoder_enable().
 */
void intel_vrr_transcoder_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);

	if (!intel_vrr_possible(old_crtc_state))
		return;

	if (intel_vrr_always_use_vrr_tg(display))
		intel_vrr_tg_disable(old_crtc_state);
}
959 
960 bool intel_vrr_is_fixed_rr(const struct intel_crtc_state *crtc_state)
961 {
962 	return crtc_state->vrr.flipline &&
963 	       crtc_state->vrr.flipline == crtc_state->vrr.vmax &&
964 	       crtc_state->vrr.flipline == crtc_state->vrr.vmin;
965 }
966 
967 static
968 void intel_vrr_get_dc_balance_config(struct intel_crtc_state *crtc_state)
969 {
970 	u32 reg_val;
971 	struct intel_display *display = to_intel_display(crtc_state);
972 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
973 	enum pipe pipe = crtc->pipe;
974 
975 	if (!intel_vrr_dc_balance_possible(crtc_state))
976 		return;
977 
978 	reg_val = intel_de_read(display, PIPEDMC_DCB_VMIN(pipe));
979 	crtc_state->vrr.dc_balance.vmin = reg_val ? reg_val + 1 : 0;
980 
981 	reg_val = intel_de_read(display, PIPEDMC_DCB_VMAX(pipe));
982 	crtc_state->vrr.dc_balance.vmax = reg_val ? reg_val + 1 : 0;
983 
984 	crtc_state->vrr.dc_balance.guardband =
985 		intel_de_read(display, PIPEDMC_DCB_GUARDBAND(pipe));
986 	crtc_state->vrr.dc_balance.max_increase =
987 		intel_de_read(display, PIPEDMC_DCB_MAX_INCREASE(pipe));
988 	crtc_state->vrr.dc_balance.max_decrease =
989 		intel_de_read(display, PIPEDMC_DCB_MAX_DECREASE(pipe));
990 	crtc_state->vrr.dc_balance.slope =
991 		intel_de_read(display, PIPEDMC_DCB_SLOPE(pipe));
992 	crtc_state->vrr.dc_balance.vblank_target =
993 		intel_de_read(display, PIPEDMC_DCB_VBLANK(pipe));
994 }
995 
/*
 * Read out the current VRR/CMRR hardware state into @crtc_state.
 * Register values are converted back to their logical form (hardware
 * holds most of them as the logical value minus one, hence the +1s).
 */
void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;
	bool vrr_enable;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	/* The CMRR M/N divider values are only meaningful when CMRR is on. */
	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13) {
		/* Display 13+ has a dedicated guardband field. */
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	} else {
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE) {
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

			/* Derive an equivalent guardband on pre-13 platforms. */
			crtc_state->vrr.guardband =
				intel_vrr_pipeline_full_to_guardband(crtc_state,
								     crtc_state->vrr.pipeline_full);
		}
	}

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		/* +1: hardware registers hold the logical value minus one */
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;

		if (DISPLAY_VER(display) < 13) {
			/* undo what intel_vrr_hw_value() does when writing the values */
			crtc_state->vrr.flipline += crtc_state->set_context_latency;
			crtc_state->vrr.vmax += crtc_state->set_context_latency;
			crtc_state->vrr.vmin += crtc_state->set_context_latency;

			crtc_state->vrr.vmin += intel_vrr_vmin_flipline_offset(display);
		}

		/*
		 * For platforms that always use VRR Timing Generator, the VTOTAL.Vtotal
		 * bits are not filled. Since for these platforms TRAN_VMIN is always
		 * filled with crtc_vtotal, use TRAN_VRR_VMIN to get the vtotal for
		 * adjusted_mode.
		 */
		if (intel_vrr_always_use_vrr_tg(display))
			crtc_state->hw.adjusted_mode.crtc_vtotal =
				intel_vrr_vmin_vtotal(crtc_state);

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}

	vrr_enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;

	/*
	 * On always-use-TG platforms the timing generator also runs in fixed
	 * refresh rate mode, so VRR_CTL_VRR_ENABLE alone does not mean VRR
	 * is actually in use (cf. intel_vrr_transcoder_enable()).
	 */
	if (intel_vrr_always_use_vrr_tg(display))
		crtc_state->vrr.enable = vrr_enable && !intel_vrr_is_fixed_rr(crtc_state);
	else
		crtc_state->vrr.enable = vrr_enable;

	intel_vrr_get_dc_balance_config(crtc_state);

	/*
	 * #TODO: For Both VRR and CMRR the flag I915_MODE_FLAG_VRR is set for mode_flags.
	 * Since CMRR is currently disabled, set this flag for VRR for now.
	 * Need to keep this in mind while re-enabling CMRR.
	 */
	if (crtc_state->vrr.enable)
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

	/*
	 * For platforms that always use the VRR timing generator, we overwrite
	 * crtc_vblank_start with vtotal - guardband to reflect the delayed
	 * vblank start. This works for both default and optimized guardband values.
	 * On other platforms, we keep the original value from
	 * intel_get_transcoder_timings() and apply adjustments only in VRR-specific
	 * paths as needed.
	 */
	if (intel_vrr_always_use_vrr_tg(display))
		crtc_state->hw.adjusted_mode.crtc_vblank_start =
			crtc_state->hw.adjusted_mode.crtc_vtotal -
			crtc_state->vrr.guardband;
}
1100 
1101 int intel_vrr_safe_window_start(const struct intel_crtc_state *crtc_state)
1102 {
1103 	struct intel_display *display = to_intel_display(crtc_state);
1104 
1105 	if (DISPLAY_VER(display) >= 30)
1106 		return crtc_state->hw.adjusted_mode.crtc_vdisplay -
1107 		       crtc_state->set_context_latency;
1108 	else
1109 		return crtc_state->hw.adjusted_mode.crtc_vdisplay;
1110 }
1111 
/*
 * Vblank start for DC balance vmin: prefer the pending ("next") adjusted
 * flipline value, falling back to the settled ("final") one when no
 * adjustment is currently programmed.
 */
static int
intel_vrr_dcb_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	/*
	 * Read the "next" value only once: it comes from a LIVE register,
	 * so evaluating it twice (as a ?: would) costs an extra MMIO read
	 * and could return a different value than the one just checked.
	 */
	int vblank_start = intel_vrr_dcb_vmin_vblank_start_next(crtc_state);

	if (vblank_start < 0)
		return intel_vrr_dcb_vmin_vblank_start_final(crtc_state);

	return vblank_start;
}
1119 
1120 int intel_vrr_vmin_safe_window_end(const struct intel_crtc_state *crtc_state)
1121 {
1122 	int vmin_vblank_start = crtc_state->vrr.dc_balance.enable ?
1123 			intel_vrr_dcb_vmin_vblank_start(crtc_state) :
1124 			intel_vrr_vmin_vblank_start(crtc_state);
1125 
1126 	return vmin_vblank_start - crtc_state->set_context_latency;
1127 }
1128 
1129 int intel_vrr_dcb_vmin_vblank_start_next(const struct intel_crtc_state *crtc_state)
1130 {
1131 	struct intel_display *display = to_intel_display(crtc_state);
1132 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1133 	u32 tmp = 0;
1134 
1135 	tmp = intel_de_read(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder));
1136 
1137 	if (REG_FIELD_GET(VRR_DCB_ADJ_FLIPLINE_CNT_MASK, tmp) == 0)
1138 		return -EINVAL;
1139 
1140 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_ADJ_FLIPLINE(tmp) + 1);
1141 }
1142 
1143 int intel_vrr_dcb_vmax_vblank_start_next(const struct intel_crtc_state *crtc_state)
1144 {
1145 	struct intel_display *display = to_intel_display(crtc_state);
1146 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1147 	u32 tmp = 0;
1148 
1149 	tmp = intel_de_read(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder));
1150 
1151 	if (REG_FIELD_GET(VRR_DCB_ADJ_VMAX_CNT_MASK, tmp) == 0)
1152 		return -EINVAL;
1153 
1154 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_ADJ_VMAX(tmp) + 1);
1155 }
1156 
1157 int intel_vrr_dcb_vmin_vblank_start_final(const struct intel_crtc_state *crtc_state)
1158 {
1159 	struct intel_display *display = to_intel_display(crtc_state);
1160 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1161 	u32 tmp = 0;
1162 
1163 	tmp = intel_de_read(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder));
1164 
1165 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_FLIPLINE(tmp) + 1);
1166 }
1167 
1168 int intel_vrr_dcb_vmax_vblank_start_final(const struct intel_crtc_state *crtc_state)
1169 {
1170 	struct intel_display *display = to_intel_display(crtc_state);
1171 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1172 	u32 tmp = 0;
1173 
1174 	tmp = intel_de_read(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder));
1175 
1176 	return intel_vrr_vblank_start(crtc_state, VRR_DCB_VMAX(tmp) + 1);
1177 }
1178