xref: /linux/drivers/gpu/drm/i915/display/intel_vrr.c (revision 7f71507851fc7764b36a3221839607d3a45c2025)
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_vrr.h"
#include "intel_vrr_regs.h"
#include "intel_dp.h"

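/*
 * Refresh rates below are handled in units of 1/FIXED_POINT_PRECISION Hz
 * (0.01 Hz), so CMRR_PRECISION_TOLERANCE corresponds to 0.1 Hz.
 */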
#define FIXED_POINT_PRECISION		100
#define CMRR_PRECISION_TOLERANCE	10

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	/*
	 * A DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also span more than 10 Hz for a
	 * reasonable Adaptive Sync / Variable Refresh Rate end user
	 * experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(display) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

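/*
 * Check whether @vrefresh falls within the VRR range the panel advertises
 * via its EDID monitor range descriptor.
 */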
bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

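/* A non-zero flipline indicates that VRR timings were computed for this state. */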
bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline;
}

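/*
 * Toggling the uapi VRR enable property requires a full modeset, so mark
 * the mode as changed whenever the property differs between the old and
 * new crtc states.
 */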
void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR the earliest registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* The hw imposes the extra scanline before frame start */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	/* Min vblank is actually determined by flipline, which is always >= vmin + 1 */
	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
}

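/*
 * Fractional CMRR is only required when the refresh rate achievable with
 * an integer vtotal misses the desired refresh rate by at least
 * CMRR_PRECISION_TOLERANCE (0.1 Hz).
 */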
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!HAS_CMRR(display))
		return false;

	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}

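/*
 * Compute the fixed vtotal and the CMRR M/N divider pair for the desired
 * refresh rate. When video_mode_required is set, the rate is scaled by
 * 1001/1000, i.e. the factor used for video-optimized rates such as
 * 59.94 Hz instead of 60 Hz.
 */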
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that during VRR toggle/push/etc.
	 */
	if (crtc_state->joiner_pipes)
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(display))
		crtc_state->update_lrr = true;

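	/*
	 * vmin corresponds to the panel's maximum refresh rate (shortest
	 * frame) and vmax to its minimum refresh rate (longest frame),
	 * both expressed in scanlines of the current mode.
	 */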
	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);

	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	/*
	 * flipline determines the minimum vblank length the hardware will
	 * generate, and flipline >= vmin + 1, hence we reduce vmin by one
	 * to make sure we can get the actual minimum vblank length.
	 */
	crtc_state->vrr.vmin = vmin - 1;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;

	/*
	 * When the panel is VRR capable but userspace has not enabled
	 * adaptive sync mode, Fixed Average Vtotal mode should be
	 * enabled instead.
	 */
	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	} else if (is_cmrr_frac_required(crtc_state) && is_edp) {
		crtc_state->vrr.enable = true;
		crtc_state->cmrr.enable = true;
		/*
		 * TODO: Compute the precise target refresh rate to determine
		 * whether video_mode_required should be true. Currently set
		 * to false due to uncertainty about the precise target
		 * refresh rate.
		 */
		crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
		crtc_state->vrr.vmin = crtc_state->vrr.vmax;
		crtc_state->vrr.flipline = crtc_state->vrr.vmin;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	}

	if (intel_dp->as_sdp_supported && crtc_state->vrr.enable) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}
}

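/*
 * Derive the vblank exit length fields once the final timings are known:
 * the guardband on display version 13+, or the legacy pipeline-full value
 * (capped at 255) on older platforms.
 */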
void intel_vrr_compute_config_late(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
	} else {
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);
	}
}

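/*
 * Common TRANS_VRR_CTL contents: ignore max shift, flip line enable, plus
 * the platform specific guardband or pipeline-full field.
 */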
static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

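	/* The VMIN/VMAX/FLIPLINE registers hold the programmed value minus one. */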
	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(crtc_state));
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);
}

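/*
 * Send a push: the transcoder starts the vblank exit (and latches the
 * double buffered registers) as soon as possible afterwards, see the
 * comment above intel_vrr_vblank_exit_length().
 */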
void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
}

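/* Check whether the previously sent push is still pending in the hardware. */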
bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}

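/*
 * Enable VRR: arm the push mechanism, program the AS SDP vsync bounds if
 * supported, then enable the transcoder's VRR (and CMRR, if in use).
 */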
void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
			       trans_vrr_ctl(crtc_state));
	} else {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
	}
}

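/*
 * Disable VRR, wait for the hardware to report VRR as no longer live, then
 * disarm the push and clear the AS SDP vsync bounds.
 */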
void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	intel_de_wait_for_clear(display,
				TRANS_VRR_STATUS(display, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder), 0);
}

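/* Read the current VRR/CMRR configuration back from the transcoder registers. */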
void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
		crtc_state->vrr.pipeline_full =
			REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;
	}

	if (crtc_state->vrr.enable) {
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}
}