// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_vrr.h"
#include "intel_dp.h"

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	const struct drm_display_info *info = &connector->base.display_info;
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	struct intel_dp *intel_dp;

	/*
	 * A DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also span more than 10 Hz for a
	 * reasonable Adaptive Sync or Variable Refresh Rate end user
	 * experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(i915) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR the earliest point registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable from 1 to 4.
 */
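/*
 * Illustrative example (hypothetical numbers, DISPLAY_VER >= 13 where the
 * vblank exit length is the guardband): with vrr.vmin = 1124,
 * vrr.vmax = 1406 and a guardband of 41 lines, the earliest latch point is
 * vmin + 1 - 41 = 1084 and the latest is 1406 - 41 = 1365.
 */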
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);

	if (DISPLAY_VER(i915) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* The hw imposes the extra scanline before frame start */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	/* Min vblank is actually determined by flipline, which is always >= vmin + 1 */
	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that during VRR toggle/push/etc.
	 */
	if (crtc_state->bigjoiner_pipes)
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(i915))
		crtc_state->update_lrr = true;

	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
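	/*
	 * Illustrative example (hypothetical numbers): for a 1920x1080@60
	 * mode with crtc_clock = 148500 kHz, crtc_htotal = 2200 and a
	 * 48-60 Hz monitor range, vmin = ceil(148500000 / (2200 * 60)) = 1125
	 * and vmax = 148500000 / (2200 * 48) = 1406.
	 */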

	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	/*
	 * flipline determines the minimum vblank length the hardware will
	 * generate, and flipline >= vmin + 1, hence we reduce vmin by one
	 * to make sure we can get the actual minimum vblank length.
	 */
	crtc_state->vrr.vmin = vmin - 1;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;

	/*
	 * For XE_LPD+ we use the guardband; the pipeline full
	 * override is deprecated.
	 */
	if (DISPLAY_VER(i915) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
	} else {
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);
	}
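	/*
	 * Illustrative example (hypothetical numbers): continuing the
	 * 1920x1080@60 case above with crtc_vblank_start = 1084 and
	 * vrr.vmin = 1124, the guardband becomes 1124 + 1 - 1084 = 41 lines.
	 */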

	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
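		/*
		 * vrr.vsync_start/end are computed relative to crtc_vtotal
		 * and later written to TRANS_VRR_VSYNC in intel_vrr_enable().
		 */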
		if (intel_dp_as_sdp_supported(intel_dp)) {
			crtc_state->vrr.vsync_start =
				(crtc_state->hw.adjusted_mode.crtc_vtotal -
					crtc_state->hw.adjusted_mode.vsync_start);
			crtc_state->vrr.vsync_end =
				(crtc_state->hw.adjusted_mode.crtc_vtotal -
					crtc_state->hw.adjusted_mode.vsync_end);
		}
	}
}

static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);

	if (DISPLAY_VER(i915) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(dev_priv, 12, 13))
		intel_de_rmw(dev_priv, CHICKEN_TRANS(cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!crtc_state->vrr.flipline) {
		intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder), 0);
		return;
	}

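	/*
	 * The VMIN/VMAX/FLIPLINE registers are programmed with the value
	 * minus one; intel_vrr_get_config() adds the one back on readout.
	 */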
	intel_de_write(dev_priv, TRANS_VRR_VMIN(cpu_transcoder), crtc_state->vrr.vmin - 1);
	intel_de_write(dev_priv, TRANS_VRR_VMAX(cpu_transcoder), crtc_state->vrr.vmax - 1);
	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder), trans_vrr_ctl(crtc_state));
	intel_de_write(dev_priv, TRANS_VRR_FLIPLINE(cpu_transcoder), crtc_state->vrr.flipline - 1);
}

void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder),
		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(dev_priv, TRANS_PUSH(cpu_transcoder)) & TRANS_PUSH_SEND;
}

void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

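	/*
	 * Arm TRANS_PUSH before enabling VRR in TRANS_VRR_CTL; pushes sent
	 * via intel_vrr_send_push() then start the vblank exit (see the
	 * comment above intel_vrr_vblank_exit_length()).
	 */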
	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder), TRANS_PUSH_EN);

	if (HAS_AS_SDP(dev_priv))
		intel_de_write(dev_priv, TRANS_VRR_VSYNC(cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder),
		       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
}

void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

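	/*
	 * Clear the enable bit, wait for the hardware to report VRR as no
	 * longer live, then tear down the push and (if present) the
	 * Adaptive-Sync vsync programming.
	 */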
	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	intel_de_wait_for_clear(dev_priv, TRANS_VRR_STATUS(cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder), 0);

	if (HAS_AS_SDP(dev_priv))
		intel_de_write(dev_priv, TRANS_VRR_VSYNC(cpu_transcoder), 0);
}

void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;

	trans_vrr_ctl = intel_de_read(dev_priv, TRANS_VRR_CTL(cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;

	if (DISPLAY_VER(dev_priv) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
		crtc_state->vrr.pipeline_full =
			REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

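	/*
	 * VMIN/VMAX/FLIPLINE are stored minus one in the hardware; add the
	 * one back so the state matches what intel_vrr_compute_config()
	 * produced and intel_vrr_set_transcoder_timings() programmed.
	 */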
	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(dev_priv, TRANS_VRR_FLIPLINE(cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(dev_priv, TRANS_VRR_VMAX(cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(dev_priv, TRANS_VRR_VMIN(cpu_transcoder)) + 1;
	}

	if (crtc_state->vrr.enable) {
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

		if (HAS_AS_SDP(dev_priv)) {
			trans_vrr_vsync =
				intel_de_read(dev_priv, TRANS_VRR_VSYNC(cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}
}