xref: /linux/drivers/gpu/drm/i915/display/intel_vrr.c (revision 4b132aacb0768ac1e652cf517097ea6f237214b9)
1 // SPDX-License-Identifier: MIT
2 /*
3  * Copyright © 2020 Intel Corporation
4  *
5  */
6 
7 #include "i915_drv.h"
8 #include "i915_reg.h"
9 #include "intel_de.h"
10 #include "intel_display_types.h"
11 #include "intel_vrr.h"
12 #include "intel_vrr_regs.h"
13 #include "intel_dp.h"
14 
15 #define FIXED_POINT_PRECISION		100
16 #define CMRR_PRECISION_TOLERANCE	10
17 
18 bool intel_vrr_is_capable(struct intel_connector *connector)
19 {
20 	const struct drm_display_info *info = &connector->base.display_info;
21 	struct drm_i915_private *i915 = to_i915(connector->base.dev);
22 	struct intel_dp *intel_dp;
23 
24 	/*
25 	 * DP Sink is capable of VRR video timings if
26 	 * Ignore MSA bit is set in DPCD.
27 	 * EDID monitor range also should be atleast 10 for reasonable
28 	 * Adaptive Sync or Variable Refresh Rate end user experience.
29 	 */
30 	switch (connector->base.connector_type) {
31 	case DRM_MODE_CONNECTOR_eDP:
32 		if (!connector->panel.vbt.vrr)
33 			return false;
34 		fallthrough;
35 	case DRM_MODE_CONNECTOR_DisplayPort:
36 		intel_dp = intel_attached_dp(connector);
37 
38 		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
39 			return false;
40 
41 		break;
42 	default:
43 		return false;
44 	}
45 
46 	return HAS_VRR(i915) &&
47 		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
48 }
49 
50 bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
51 {
52 	const struct drm_display_info *info = &connector->base.display_info;
53 
54 	return intel_vrr_is_capable(connector) &&
55 		vrefresh >= info->monitor_range.min_vfreq &&
56 		vrefresh <= info->monitor_range.max_vfreq;
57 }
58 
59 void
60 intel_vrr_check_modeset(struct intel_atomic_state *state)
61 {
62 	int i;
63 	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
64 	struct intel_crtc *crtc;
65 
66 	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
67 					    new_crtc_state, i) {
68 		if (new_crtc_state->uapi.vrr_enabled !=
69 		    old_crtc_state->uapi.vrr_enabled)
70 			new_crtc_state->uapi.mode_changed = true;
71 	}
72 }
73 
74 /*
75  * Without VRR registers get latched at:
76  *  vblank_start
77  *
78  * With VRR the earliest registers can get latched is:
79  *  intel_vrr_vmin_vblank_start(), which if we want to maintain
80  *  the correct min vtotal is >=vblank_start+1
81  *
82  * The latest point registers can get latched is the vmax decision boundary:
83  *  intel_vrr_vmax_vblank_start()
84  *
85  * Between those two points the vblank exit starts (and hence registers get
86  * latched) ASAP after a push is sent.
87  *
88  * framestart_delay is programmable 1-4.
89  */
90 static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
91 {
92 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
93 	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
94 
95 	if (DISPLAY_VER(i915) >= 13)
96 		return crtc_state->vrr.guardband;
97 	else
98 		/* The hw imposes the extra scanline before frame start */
99 		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
100 }
101 
102 int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
103 {
104 	/* Min vblank actually determined by flipline that is always >=vmin+1 */
105 	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
106 }
107 
108 int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
109 {
110 	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
111 }
112 
113 static bool
114 is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
115 {
116 	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
117 	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
118 	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
119 
120 	if (!HAS_CMRR(i915))
121 		return false;
122 
123 	actual_refresh_k =
124 		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
125 	pixel_clock_per_line =
126 		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
127 	calculated_refresh_k =
128 		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;
129 
130 	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
131 		return false;
132 
133 	return true;
134 }
135 
/*
 * Compute the fixed vtotal to use in CMRR (Content Matched Refresh Rate)
 * mode, filling crtc_state->cmrr.cmrr_n and cmrr_m as a side effect.
 *
 * When @video_mode_required, the rate is scaled by 1001/1000 —
 * NOTE(review): presumably for NTSC-style fractional rates (59.94 vs 60 Hz);
 * confirm against bspec.
 */
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	/* CMRR N = refresh rate (Hz) * htotal, scaled by multiplier_n */
	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	/* vtotal = ceil(pixel rate (Hz) * multiplier_n / cmrr_n) */
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	/* do_div() returns the remainder: cmrr_m = adjusted_pixel_rate % cmrr_n */
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}
159 
/*
 * Compute the VRR state for @crtc_state: vmin/vmax/flipline from the
 * panel's monitor range, optional CMRR fixed-vtotal mode, the AS SDP
 * vsync window, and the guardband / pipeline-full latency value.
 */
void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that during VRR toggle/push/etc.
	 */
	if (crtc_state->joiner_pipes)
		return;

	/* No VRR with interlaced modes */
	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(i915))
		crtc_state->update_lrr = true;

	/*
	 * Scanline counts from the refresh rate limits:
	 * vtotal = pixel clock / (htotal * vfreq). vmin is rounded up
	 * so the max refresh rate is never exceeded.
	 */
	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);

	/* Neither bound may be shorter than the mode's nominal vtotal */
	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and flipline>=vmin+1, hence we reduce vmin by one
	 * to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin = vmin - 1;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;

	/*
	 * When panel is VRR capable and userspace has
	 * not enabled adaptive sync mode then Fixed Average
	 * Vtotal mode should be enabled.
	 */
	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	} else if (is_cmrr_frac_required(crtc_state) && is_edp) {
		crtc_state->vrr.enable = true;
		crtc_state->cmrr.enable = true;
		/*
		 * TODO: Compute precise target refresh rate to determine
		 * if video_mode_required should be true. Currently set to
		 * false due to uncertainty about the precise target
		 * refresh Rate.
		 */
		crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
		/* CMRR runs at a fixed vtotal: vmin == vmax == flipline */
		crtc_state->vrr.vmin = crtc_state->vrr.vmax;
		crtc_state->vrr.flipline = crtc_state->vrr.vmin;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	}

	/*
	 * AS SDP vsync position, stored as an offset back from the end
	 * of the frame (crtc_vtotal - vsync_start/end).
	 */
	if (intel_dp_as_sdp_supported(intel_dp) &&
	    crtc_state->vrr.enable) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}

	/*
	 * For XE_LPD+, we use guardband and pipeline override
	 * is deprecated.
	 */
	if (DISPLAY_VER(i915) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
	} else {
		/* clamped to 255 — presumably the register field limit */
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);
	}
}
259 
260 static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
261 {
262 	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);
263 
264 	if (DISPLAY_VER(i915) >= 13)
265 		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
266 			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
267 	else
268 		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
269 			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
270 			VRR_CTL_PIPELINE_FULL_OVERRIDE;
271 }
272 
/*
 * Program the VRR transcoder timing registers (vmin/vmax/flipline,
 * CMRR M/N and control) for @crtc_state, or clear TRANS_VRR_CTL when
 * VRR timings are not in use.
 */
void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(dev_priv, 12, 13))
		intel_de_rmw(dev_priv, CHICKEN_TRANS(cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	/* flipline == 0 means VRR timings were not computed for this state */
	if (!crtc_state->vrr.flipline) {
		intel_de_write(dev_priv,
			       TRANS_VRR_CTL(dev_priv, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(dev_priv, TRANS_CMRR_M_HI(dev_priv, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(dev_priv, TRANS_CMRR_M_LO(dev_priv, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(dev_priv, TRANS_CMRR_N_HI(dev_priv, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(dev_priv, TRANS_CMRR_N_LO(dev_priv, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	/*
	 * Registers are programmed as value-1; intel_vrr_get_config()
	 * adds the +1 back on readout.
	 */
	intel_de_write(dev_priv, TRANS_VRR_VMIN(dev_priv, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(dev_priv, TRANS_VRR_VMAX(dev_priv, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(dev_priv, TRANS_VRR_CTL(dev_priv, cpu_transcoder),
		       trans_vrr_ctl(crtc_state));
	intel_de_write(dev_priv, TRANS_VRR_FLIPLINE(dev_priv, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);
}
313 
314 void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
315 {
316 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
317 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
318 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
319 
320 	if (!crtc_state->vrr.enable)
321 		return;
322 
323 	intel_de_write(dev_priv, TRANS_PUSH(dev_priv, cpu_transcoder),
324 		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
325 }
326 
327 bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
328 {
329 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
330 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
331 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
332 
333 	if (!crtc_state->vrr.enable)
334 		return false;
335 
336 	return intel_de_read(dev_priv, TRANS_PUSH(dev_priv, cpu_transcoder)) & TRANS_PUSH_SEND;
337 }
338 
339 void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
340 {
341 	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
342 	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
343 
344 	if (!crtc_state->vrr.enable)
345 		return;
346 
347 	intel_de_write(dev_priv, TRANS_PUSH(dev_priv, cpu_transcoder),
348 		       TRANS_PUSH_EN);
349 
350 	if (HAS_AS_SDP(dev_priv))
351 		intel_de_write(dev_priv,
352 			       TRANS_VRR_VSYNC(dev_priv, cpu_transcoder),
353 			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
354 			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));
355 
356 	if (crtc_state->cmrr.enable) {
357 		intel_de_write(dev_priv, TRANS_VRR_CTL(dev_priv, cpu_transcoder),
358 			       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
359 			       trans_vrr_ctl(crtc_state));
360 	} else {
361 		intel_de_write(dev_priv, TRANS_VRR_CTL(dev_priv, cpu_transcoder),
362 			       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
363 	}
364 }
365 
/*
 * Disable VRR on the transcoder and wait for the hardware to leave
 * VRR mode before clearing the push and AS SDP vsync registers.
 */
void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	/* Drop VRR_CTL_VRR_ENABLE, keeping the remaining CTL bits intact */
	intel_de_write(dev_priv, TRANS_VRR_CTL(dev_priv, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	/* Wait (up to 1 ms) for VRR to actually stop being live */
	intel_de_wait_for_clear(dev_priv,
				TRANS_VRR_STATUS(dev_priv, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(dev_priv, TRANS_PUSH(dev_priv, cpu_transcoder), 0);

	if (HAS_AS_SDP(dev_priv))
		intel_de_write(dev_priv,
			       TRANS_VRR_VSYNC(dev_priv, cpu_transcoder), 0);
}
386 
/*
 * Read the current VRR/CMRR hardware state back into @crtc_state
 * (state readout counterpart of intel_vrr_set_transcoder_timings()
 * and intel_vrr_enable()).
 */
void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;

	trans_vrr_ctl = intel_de_read(dev_priv,
				      TRANS_VRR_CTL(dev_priv, cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
	if (HAS_CMRR(dev_priv))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(dev_priv, TRANS_CMRR_N_LO(dev_priv, cpu_transcoder),
					     TRANS_CMRR_N_HI(dev_priv, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(dev_priv, TRANS_CMRR_M_LO(dev_priv, cpu_transcoder),
					     TRANS_CMRR_M_HI(dev_priv, cpu_transcoder));
	}

	/* XE_LPD+ uses the guardband; older platforms the pipeline override */
	if (DISPLAY_VER(dev_priv) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	/* Registers hold value-1; add the +1 back (mirrors the -1 on write) */
	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(dev_priv,
							 TRANS_VRR_FLIPLINE(dev_priv, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(dev_priv,
						     TRANS_VRR_VMAX(dev_priv, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(dev_priv,
						     TRANS_VRR_VMIN(dev_priv, cpu_transcoder)) + 1;
	}

	if (crtc_state->vrr.enable) {
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

		if (HAS_AS_SDP(dev_priv)) {
			trans_vrr_vsync =
				intel_de_read(dev_priv,
					      TRANS_VRR_VSYNC(dev_priv, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}
}
440