xref: /linux/drivers/gpu/drm/i915/display/intel_vrr.c (revision 22c55fb9eb92395d999b8404d73e58540d11bdd8)
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include <drm/drm_print.h>

#include "intel_de.h"
#include "intel_display_regs.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_vrr.h"
#include "intel_vrr_regs.h"

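/*
 * is_cmrr_frac_required() compares refresh rates in fixed point with two
 * decimal digits (units of 1/FIXED_POINT_PRECISION Hz, i.e. 0.01 Hz), so a
 * CMRR_PRECISION_TOLERANCE of 10 corresponds to a 0.1 Hz tolerance.
 */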
#define FIXED_POINT_PRECISION		100
#define CMRR_PRECISION_TOLERANCE	10

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	/*
	 * A DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also span at least 10 Hz for a
	 * reasonable Adaptive Sync or Variable Refresh Rate end user
	 * experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		if (connector->mst.dp)
			return false;
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(display) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

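/*
 * vrr.flipline is only set once VRR timings have been computed (see
 * intel_vrr_compute_config()), so a non-zero flipline doubles as the
 * "VRR timing generator can be used for this crtc state" marker.
 */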
bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline;
}

void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

static int intel_vrr_real_vblank_delay(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->hw.adjusted_mode.crtc_vblank_start -
		crtc_state->hw.adjusted_mode.crtc_vdisplay;
}

static int intel_vrr_extra_vblank_delay(struct intel_display *display)
{
	/*
	 * On ICL/TGL VRR hardware inserts one extra scanline
	 * just after vactive, which pushes the vmin decision
	 * boundary ahead accordingly. We'll include the extra
	 * scanline in our vblank delay estimates to make sure
	 * that we never underestimate how long we have until
	 * the delayed vblank has passed.
	 */
	return DISPLAY_VER(display) < 13 ? 1 : 0;
}

int intel_vrr_vblank_delay(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_real_vblank_delay(crtc_state) +
		intel_vrr_extra_vblank_delay(display);
}

static int intel_vrr_flipline_offset(struct intel_display *display)
{
	/* ICL/TGL hardware imposes flipline>=vmin+1 */
	return DISPLAY_VER(display) < 13 ? 1 : 0;
}

static int intel_vrr_vmin_flipline(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return crtc_state->vrr.vmin + intel_vrr_flipline_offset(display);
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR, the earliest point registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start+1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* hardware imposes one extra scanline somewhere */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

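/*
 * On display versions below 13 the vmin/vmax/flipline values are programmed
 * without the vblank delay (see intel_vrr_compute_config_late()), so the
 * real vtotal is obtained by adding intel_vrr_real_vblank_delay() back here.
 */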
int intel_vrr_vmin_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	/* Min vblank actually determined by flipline */
	if (DISPLAY_VER(display) >= 13)
		return intel_vrr_vmin_flipline(crtc_state);
	else
		return intel_vrr_vmin_flipline(crtc_state) +
			intel_vrr_real_vblank_delay(crtc_state);
}

int intel_vrr_vmax_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.vmax;
	else
		return crtc_state->vrr.vmax +
			intel_vrr_real_vblank_delay(crtc_state);
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmin_vtotal(crtc_state) - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_vmax_vtotal(crtc_state) - intel_vrr_vblank_exit_length(crtc_state);
}

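/*
 * A fractional (CMRR) vtotal is only worthwhile when the refresh rate
 * requested by the mode cannot be hit closely enough with an integer
 * vtotal: compare the nominal refresh rate against what
 * crtc_clock / (htotal * vtotal) actually yields, and require CMRR when
 * they differ by CMRR_PRECISION_TOLERANCE (0.1 Hz) or more.
 */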
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* Avoid CMRR for now until we have VRR with fixed timings working */
	if (!HAS_CMRR(display) || true)
		return false;

	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}

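/*
 * Derive the fixed vtotal and the CMRR M/N values from the nominal mode:
 * cmrr_n is the desired refresh rate times htotal (scaled by multiplier_n),
 * vtotal is the pixel clock divided by that (rounded up), and cmrr_m is the
 * remainder of that division; M and N are later programmed into
 * TRANS_CMRR_M/TRANS_CMRR_N. With video_mode_required the usual 1001/1000
 * multipliers for "video optimized" rates (e.g. 59.94 vs. 60 Hz) are applied.
 */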
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}

static
void intel_vrr_compute_cmrr_timings(struct intel_crtc_state *crtc_state)
{
	crtc_state->cmrr.enable = true;
	/*
	 * TODO: Compute the precise target refresh rate to determine
	 * whether video_mode_required should be true. Currently set to
	 * false due to uncertainty about the precise target refresh rate.
	 */
	crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
	crtc_state->vrr.vmin = crtc_state->vrr.vmax;
	crtc_state->vrr.flipline = crtc_state->vrr.vmin;
	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}

static
void intel_vrr_compute_vrr_timings(struct intel_crtc_state *crtc_state)
{
	crtc_state->vrr.enable = true;
	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}

/*
 * For fixed refresh rate mode, Vmin, Vmax and Flipline are all set to
 * the Vtotal value.
 */
static
int intel_vrr_fixed_rr_vtotal(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int crtc_vtotal = crtc_state->hw.adjusted_mode.crtc_vtotal;

	if (DISPLAY_VER(display) >= 13)
		return crtc_vtotal;
	else
		return crtc_vtotal -
			intel_vrr_real_vblank_delay(crtc_state);
}

static
int intel_vrr_fixed_rr_vmax(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_vtotal(crtc_state);
}

static
int intel_vrr_fixed_rr_vmin(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	return intel_vrr_fixed_rr_vtotal(crtc_state) -
		intel_vrr_flipline_offset(display);
}

static
int intel_vrr_fixed_rr_flipline(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_vtotal(crtc_state);
}

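/*
 * Program the fixed refresh rate timings (vmin == vmax == flipline ==
 * vtotal) into the VRR registers. The hardware registers hold the scanline
 * count minus one, hence the "- 1" on each write (and the matching "+ 1"
 * in intel_vrr_get_config()).
 */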
void intel_vrr_set_fixed_rr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_fixed_rr_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_fixed_rr_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_fixed_rr_flipline(crtc_state) - 1);
}

static
void intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state *crtc_state)
{
	/*
	 * For fixed rr, vmin = vmax = flipline.
	 * vmin is already set to crtc_vtotal, so set vmax and flipline
	 * to the same value.
	 */
	crtc_state->vrr.vmax = crtc_state->hw.adjusted_mode.crtc_vtotal;
	crtc_state->vrr.flipline = crtc_state->hw.adjusted_mode.crtc_vtotal;
}

static
int intel_vrr_compute_vmin(struct intel_crtc_state *crtc_state)
{
	/*
	 * To make fixed rr and vrr work seamlessly, the guardband/pipeline
	 * full should be set such that it satisfies both the fixed and
	 * variable timings.
	 * For this, set vmin to crtc_vtotal. With this we never need to
	 * change anything to do with the guardband.
	 */
	return crtc_state->hw.adjusted_mode.crtc_vtotal;
}

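/*
 * vmax corresponds to the panel's minimum refresh rate: the number of
 * scanlines per frame at min_vfreq is pixel clock / (htotal * min_vfreq).
 * It can never be smaller than the nominal vtotal, hence the clamp.
 */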
static
int intel_vrr_compute_vmax(struct intel_connector *connector,
			   const struct drm_display_mode *adjusted_mode)
{
	const struct drm_display_info *info = &connector->base.display_info;
	int vmax;

	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	return vmax;
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	int vmin, vmax;

	if (!HAS_VRR(display))
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));

	/*
	 * Allow a fixed refresh rate with the VRR Timing Generator.
	 * For now set vrr.in_range to false with the joiner, to allow
	 * fixed_rr but skip actual VRR and LRR.
	 * TODO: For actual VRR with the joiner, we need to figure out how to
	 * correctly sequence transcoder level stuff vs. pipe level stuff
	 * in the commit.
	 */
	if (crtc_state->joiner_pipes)
		crtc_state->vrr.in_range = false;

	vmin = intel_vrr_compute_vmin(crtc_state);

	if (crtc_state->vrr.in_range) {
		if (HAS_LRR(display))
			crtc_state->update_lrr = true;
		vmax = intel_vrr_compute_vmax(connector, adjusted_mode);
	} else {
		vmax = vmin;
	}

	crtc_state->vrr.vmin = vmin;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin;

	if (crtc_state->uapi.vrr_enabled && vmin < vmax)
		intel_vrr_compute_vrr_timings(crtc_state);
	else if (is_cmrr_frac_required(crtc_state) && is_edp)
		intel_vrr_compute_cmrr_timings(crtc_state);
	else
		intel_vrr_compute_fixed_rr_timings(crtc_state);

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and on ICL/TGL flipline>=vmin+1, hence we reduce
	 * vmin by one to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin -= intel_vrr_flipline_offset(display);

	if (HAS_AS_SDP(display)) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}
}

void intel_vrr_compute_config_late(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start;
	} else {
		/* hardware imposes one extra scanline somewhere */
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);

		/*
		 * vmin/vmax/flipline also need to be adjusted by
		 * the vblank delay to maintain correct vtotals.
		 */
		crtc_state->vrr.vmin -= intel_vrr_real_vblank_delay(crtc_state);
		crtc_state->vrr.vmax -= intel_vrr_real_vblank_delay(crtc_state);
		crtc_state->vrr.flipline -= intel_vrr_real_vblank_delay(crtc_state);
	}
}

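/*
 * Build the static TRANS_VRR_CTL bits for this platform: flipline based
 * frame termination on all platforms, the guardband field on display
 * version 13+, the pipeline full override on older platforms, and
 * IGN_MAX_SHIFT everywhere except display version 14+.
 */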
static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 14)
		return VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	intel_vrr_set_fixed_rr_timings(crtc_state);

	if (!intel_vrr_always_use_vrr_tg(display) && !crtc_state->vrr.enable)
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));
}

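/*
 * Writing TRANS_PUSH_SEND tells the VRR timing generator to end the current
 * frame (the vblank exit then starts ASAP, see the comment above
 * intel_vrr_vblank_exit_length()); the hardware clears the bit again once
 * the push has been handled (see intel_vrr_check_push_sent()). When issued
 * through a DSB the write is wrapped in a non-posted section.
 */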
void intel_vrr_send_push(struct intel_dsb *dsb,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	if (dsb)
		intel_dsb_nonpost_start(dsb);

	intel_de_write_dsb(display, dsb,
			   TRANS_PUSH(display, cpu_transcoder),
			   TRANS_PUSH_EN | TRANS_PUSH_SEND);

	if (dsb)
		intel_dsb_nonpost_end(dsb);
}

void intel_vrr_check_push_sent(struct intel_dsb *dsb,
			       const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	/*
	 * Make sure the push send bit has cleared. This should
	 * already be the case as long as the caller makes sure
	 * this is called after the delayed vblank has occurred.
	 */
	if (dsb) {
		int wait_us, count;

		wait_us = 2;
		count = 1;

		/*
		 * If the bit hasn't cleared the DSB will
		 * raise the poll error interrupt.
		 */
		intel_dsb_poll(dsb, TRANS_PUSH(display, cpu_transcoder),
			       TRANS_PUSH_SEND, 0, wait_us, count);
	} else {
		if (intel_vrr_is_push_sent(crtc_state))
			drm_err(display->drm, "[CRTC:%d:%s] VRR push send still pending\n",
				crtc->base.base.id, crtc->base.name);
	}
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}

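/*
 * On display version 30+ the transcoder is always driven through the VRR
 * timing generator, even for a fixed refresh rate; on older platforms the
 * VRR timing generator is only enabled while VRR is actually in use.
 */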
bool intel_vrr_always_use_vrr_tg(struct intel_display *display)
{
	if (!HAS_VRR(display))
		return false;

	if (DISPLAY_VER(display) >= 30)
		return true;

	return false;
}

static
void intel_vrr_set_db_point_and_transmission_line(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * On BMG and LNL+ onwards, EMP_AS_SDP_TL is used to program the
	 * double buffering point and transmission line for VRR packets for
	 * HDMI2.1/DP/eDP/DP->HDMI2.1 PCON.
	 * Since we currently support VRR only for DP/eDP, this is programmed
	 * for the Adaptive Sync SDP at Vsync start.
	 */
	if (DISPLAY_VERx100(display) == 1401 || DISPLAY_VER(display) >= 20)
		intel_de_write(display,
			       EMP_AS_SDP_TL(display, cpu_transcoder),
			       EMP_AS_SDP_DB_TL(crtc_state->vrr.vsync_start));
}

void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	if (!intel_vrr_always_use_vrr_tg(display)) {
		intel_vrr_set_db_point_and_transmission_line(crtc_state);

		if (crtc_state->cmrr.enable) {
			intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
				       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
				       trans_vrr_ctl(crtc_state));
		} else {
			intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
				       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
		}
	}
}

void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	if (!intel_vrr_always_use_vrr_tg(display)) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(old_crtc_state));
		intel_de_wait_for_clear(display,
					TRANS_VRR_STATUS(display, cpu_transcoder),
					VRR_STATUS_VRR_EN_LIVE, 1000);
		intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
	}

	intel_vrr_set_fixed_rr_timings(old_crtc_state);
}

void intel_vrr_transcoder_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (!intel_vrr_always_use_vrr_tg(display)) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));
		return;
	}

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	intel_vrr_set_db_point_and_transmission_line(crtc_state);

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
}

void intel_vrr_transcoder_disable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), 0);

	intel_de_wait_for_clear(display, TRANS_VRR_STATUS(display, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
}

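/*
 * A crtc state describes a fixed refresh rate when flipline, vmax and the
 * effective vmin flipline all collapse to the same value, i.e. the timing
 * generator can only ever produce one vtotal.
 */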
bool intel_vrr_is_fixed_rr(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline &&
	       crtc_state->vrr.flipline == crtc_state->vrr.vmax &&
	       crtc_state->vrr.flipline == intel_vrr_vmin_flipline(crtc_state);
}

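/*
 * Read the VRR/CMRR hardware state back into the crtc state for readout and
 * state checking. The +1 on the vmin/vmax/flipline reads mirrors the -1
 * applied when those registers are written.
 */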
void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;
	bool vrr_enable;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;

		/*
		 * For platforms that always use the VRR Timing Generator, the
		 * VTOTAL.Vtotal bits are not filled. Since on these platforms
		 * TRANS_VRR_VMIN is always filled with crtc_vtotal, use it to
		 * get the vtotal for adjusted_mode.
		 */
		if (intel_vrr_always_use_vrr_tg(display))
			crtc_state->hw.adjusted_mode.crtc_vtotal =
				intel_vrr_vmin_vtotal(crtc_state);

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}

	vrr_enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;

	if (intel_vrr_always_use_vrr_tg(display))
		crtc_state->vrr.enable = vrr_enable && !intel_vrr_is_fixed_rr(crtc_state);
	else
		crtc_state->vrr.enable = vrr_enable;

	/*
	 * TODO: For both VRR and CMRR the I915_MODE_FLAG_VRR flag is set in
	 * mode_flags. Since CMRR is currently disabled, set this flag for
	 * VRR only for now. Need to keep this in mind when re-enabling CMRR.
	 */
	if (crtc_state->vrr.enable)
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}
775