// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_vrr.h"
#include "intel_vrr_regs.h"
#include "intel_dp.h"

#define FIXED_POINT_PRECISION		100
#define CMRR_PRECISION_TOLERANCE	10

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	/*
	 * A DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also be at least 10 Hz for a
	 * reasonable Adaptive Sync / Variable Refresh Rate end user experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(display) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

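/*
 * If the VRR_ENABLED uapi property changed on a CRTC, flag the mode as
 * changed so the update is carried out as a full modeset.
 */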
void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR, the earliest point registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* The hw imposes the extra scanline before frame start */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	/* Min vblank is actually determined by flipline, which is always >= vmin + 1 */
	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
}

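/*
 * Decide whether the CMRR fractional correction is needed: compare the
 * refresh rate the fixed timings would actually produce against the
 * desired refresh rate, both in 1/FIXED_POINT_PRECISION Hz units. Only
 * when the desired rate exceeds what the fixed timings deliver by
 * CMRR_PRECISION_TOLERANCE or more (0.1 Hz here) is the fractional
 * correction required.
 */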
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!HAS_CMRR(display))
		return false;

	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}

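/*
 * Compute the CMRR vtotal and the M/N dividers for the desired refresh
 * rate. When video_mode_required is set, the 1001/1000 multipliers used
 * for fractional video rates are factored in.
 */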
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that during VRR toggle/push/etc.
	 */
	if (crtc_state->joiner_pipes)
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(display))
		crtc_state->update_lrr = true;

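	/*
	 * vmin corresponds to the panel's maximum refresh rate (the shortest
	 * frame) and vmax to its minimum refresh rate (the longest frame),
	 * both expressed in scanlines of the adjusted mode. For example, a
	 * 48-120 Hz panel gets vmin from the 120 Hz line count and vmax from
	 * the 48 Hz line count.
	 */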
	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);

	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and flipline >= vmin + 1, hence we reduce vmin by one
	 * to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin = vmin - 1;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;

	/*
	 * When the panel is VRR capable and userspace has not
	 * enabled adaptive sync mode, then Fixed Average Vtotal
	 * mode should be enabled.
	 */
	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	} else if (is_cmrr_frac_required(crtc_state) && is_edp) {
		crtc_state->vrr.enable = true;
		crtc_state->cmrr.enable = true;
		/*
		 * TODO: Compute the precise target refresh rate to determine
		 * whether video_mode_required should be true. Currently set to
		 * false due to uncertainty about the precise target
		 * refresh rate.
		 */
		crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
		crtc_state->vrr.vmin = crtc_state->vrr.vmax;
		crtc_state->vrr.flipline = crtc_state->vrr.vmin;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	}

	if (intel_dp->as_sdp_supported && crtc_state->vrr.enable) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}

	/*
	 * For XE_LPD+, we use the guardband; the pipeline full
	 * override is deprecated.
	 */
	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
	} else {
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);
	}
}

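/*
 * TRANS_VRR_CTL bits common to both the enable and disable paths:
 * flip line enable and ignore-max-shift, plus either the guardband
 * (display version 13+) or the pipeline full override (older platforms).
 */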
static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

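/*
 * Program the transcoder's VRR timing registers: the CMRR M/N dividers
 * when CMRR is in use, followed by vmin, vmax, the control bits and
 * flipline. With no flipline configured, TRANS_VRR_CTL is simply
 * cleared and VRR stays off.
 */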
void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!crtc_state->vrr.flipline) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

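	/* The hardware registers are 0-based, hence the -1 on vmin/vmax/flipline. */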
	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(crtc_state));
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);
}

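/*
 * Send a push: with VRR enabled this tells the hardware to start the
 * vblank exit (and latch the double buffered registers) as soon as the
 * vmin/flipline boundary allows.
 */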
void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}

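/*
 * Arm the push mechanism and enable VRR (and CMRR when configured) in
 * TRANS_VRR_CTL; on platforms with AS SDP support, also program the
 * vsync window used for the Adaptive Sync SDP.
 */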
void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
			       trans_vrr_ctl(crtc_state));
	} else {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
	}
}

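/*
 * Clear the VRR enable bit, wait for the hardware to report that VRR is
 * no longer live, then disarm the push and the AS SDP vsync window.
 */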
void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	intel_de_wait_for_clear(display,
				TRANS_VRR_STATUS(display, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder), 0);
}

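/*
 * Read the VRR/CMRR state back from the transcoder registers into the
 * crtc state, converting the 0-based hardware values back to the
 * 1-based vmin/vmax/flipline used by the driver.
 */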
void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;
	}

	if (crtc_state->vrr.enable) {
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}
}