// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_vrr.h"
#include "intel_vrr_regs.h"
#include "intel_dp.h"

#define FIXED_POINT_PRECISION		100
#define CMRR_PRECISION_TOLERANCE	10
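
/*
 * The refresh rates below are compared in units of 1/FIXED_POINT_PRECISION Hz
 * (i.e. 0.01 Hz), so CMRR_PRECISION_TOLERANCE corresponds to 0.1 Hz.
 */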

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	struct intel_display *display = to_intel_display(connector);
	const struct drm_display_info *info = &connector->base.display_info;
	struct intel_dp *intel_dp;

	/*
	 * A DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range should also span at least 10 Hz for a
	 * reasonable Adaptive Sync or Variable Refresh Rate end user
	 * experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(display) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.flipline;
}

void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR, the earliest registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* The hw imposes the extra scanline before frame start */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	/* The min vblank is actually determined by flipline, which is always >= vmin + 1 */
	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
}

static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!HAS_CMRR(display))
		return false;

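	/*
	 * Compare the nominal (integer) refresh rate against the exact
	 * rate implied by the adjusted timings, both in 0.01 Hz units.
	 * Fractional CMRR is only treated as required when the nominal
	 * rate exceeds the timing-derived rate by at least 0.1 Hz.
	 */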
	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}

static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

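	/*
	 * The 1001/1000 multipliers presumably account for the fractional
	 * "video" refresh rates (nominal rate * 1000/1001, e.g. 59.94 Hz
	 * rather than 60 Hz).
	 */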
	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	/*
	 * FIXME all joined pipes share the same transcoder.
	 * Need to account for that during VRR toggle/push/etc.
	 */
	if (crtc_state->joiner_pipes)
		return;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(display))
		crtc_state->update_lrr = true;

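	/*
	 * vmin/vmax are the vtotals that yield the panel's maximum and
	 * minimum refresh rates at this pixel clock and htotal.
	 * Illustrative numbers (not from any particular panel): with a
	 * 148.5 MHz pixel clock, htotal 2200 and a 40-60 Hz monitor range,
	 * vmin = DIV_ROUND_UP(148500 * 1000, 2200 * 60) = 1125 and
	 * vmax = 148500 * 1000 / (2200 * 40) = 1687.
	 */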
	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);

	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and flipline >= vmin + 1, hence we reduce vmin by one
	 * to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin = vmin - 1;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;

	/*
	 * When the panel is VRR capable but userspace has not enabled
	 * adaptive sync mode, Fixed Average Vtotal (CMRR) mode should be
	 * enabled.
	 */
	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	} else if (is_cmrr_frac_required(crtc_state) && is_edp) {
		crtc_state->vrr.enable = true;
		crtc_state->cmrr.enable = true;
		/*
		 * TODO: Compute the precise target refresh rate to determine
		 * whether video_mode_required should be true. Currently set
		 * to false due to uncertainty about the precise target
		 * refresh rate.
		 */
		crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
		crtc_state->vrr.vmin = crtc_state->vrr.vmax;
		crtc_state->vrr.flipline = crtc_state->vrr.vmin;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	}

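	/*
	 * For the Adaptive-Sync SDP the vsync position is expressed
	 * relative to the end of the frame (crtc_vtotal), hence the
	 * subtractions below.
	 */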
	if (intel_dp->as_sdp_supported && crtc_state->vrr.enable) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.vsync_end);
	}
}

void intel_vrr_compute_config_late(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_possible(crtc_state))
		return;

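	/*
	 * Size the vblank exit (guardband on display version 13+,
	 * pipeline_full + framestart_delay + 1 otherwise) such that
	 * intel_vrr_vmin_vblank_start() lines up with the mode's nominal
	 * vblank_start (one line after it on the pre-13 path).
	 */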
	if (DISPLAY_VER(display) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
	} else {
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);
	}
}

static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);

	if (DISPLAY_VER(display) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       crtc_state->vrr.vmin - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       crtc_state->vrr.vmax - 1);
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(crtc_state));
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       crtc_state->vrr.flipline - 1);
}

void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
}

void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

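	/*
	 * Arm the push mechanism here with TRANS_PUSH_EN;
	 * intel_vrr_send_push() later sets TRANS_PUSH_SEND to request the
	 * early vblank exit (see the comment above
	 * intel_vrr_vblank_exit_length()).
	 */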
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
		       TRANS_PUSH_EN);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
			       trans_vrr_ctl(crtc_state));
	} else {
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
	}
}

void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_display *display = to_intel_display(old_crtc_state);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	intel_de_wait_for_clear(display,
				TRANS_VRR_STATUS(display, cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder), 0);
}

void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl, trans_vrr_vsync;

	trans_vrr_ctl = intel_de_read(display,
				      TRANS_VRR_CTL(display, cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
	if (HAS_CMRR(display))
		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);

	if (crtc_state->cmrr.enable) {
		crtc_state->cmrr.cmrr_n =
			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
					     TRANS_CMRR_N_HI(display, cpu_transcoder));
		crtc_state->cmrr.cmrr_m =
			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
					     TRANS_CMRR_M_HI(display, cpu_transcoder));
	}

	if (DISPLAY_VER(display) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else
		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
			crtc_state->vrr.pipeline_full =
				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

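	/*
	 * The VMIN/VMAX/FLIPLINE registers are programmed with the value
	 * minus one (see intel_vrr_set_transcoder_timings()), hence the
	 * + 1 when reading the state back out.
	 */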
	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(display,
							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(display,
						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(display,
						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;
	}

	if (crtc_state->vrr.enable) {
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;

		if (HAS_AS_SDP(display)) {
			trans_vrr_vsync =
				intel_de_read(display,
					      TRANS_VRR_VSYNC(display, cpu_transcoder));
			crtc_state->vrr.vsync_start =
				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
			crtc_state->vrr.vsync_end =
				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
		}
	}
}