1 // SPDX-License-Identifier: MIT
2 /*
3 * Copyright © 2020 Intel Corporation
4 *
5 */
6
7 #include <drm/drm_print.h>
8
9 #include "intel_crtc.h"
10 #include "intel_de.h"
11 #include "intel_display_regs.h"
12 #include "intel_display_types.h"
13 #include "intel_dmc.h"
14 #include "intel_dmc_regs.h"
15 #include "intel_dp.h"
16 #include "intel_psr.h"
17 #include "intel_vrr.h"
18 #include "intel_vrr_regs.h"
19 #include "skl_prefill.h"
20 #include "skl_watermark.h"
21
22 #define FIXED_POINT_PRECISION 100
23 #define CMRR_PRECISION_TOLERANCE 10
24
/*
 * Tunable parameters for DC Balance correction.
 * These values were determined experimentally.
 */
29 #define DCB_CORRECTION_SENSITIVITY 30
30 #define DCB_CORRECTION_AGGRESSIVENESS 1000 /* ms × 100; 10 ms */
31 #define DCB_BLANK_TARGET 50
32
intel_vrr_is_capable(struct intel_connector * connector)33 bool intel_vrr_is_capable(struct intel_connector *connector)
34 {
35 struct intel_display *display = to_intel_display(connector);
36 const struct drm_display_info *info = &connector->base.display_info;
37 struct intel_dp *intel_dp;
38
39 if (!HAS_VRR(display))
40 return false;
41
42 /*
43 * DP Sink is capable of VRR video timings if
44 * Ignore MSA bit is set in DPCD.
45 * EDID monitor range also should be atleast 10 for reasonable
46 * Adaptive Sync or Variable Refresh Rate end user experience.
47 */
48 switch (connector->base.connector_type) {
49 case DRM_MODE_CONNECTOR_eDP:
50 if (!connector->panel.vbt.vrr)
51 return false;
52 fallthrough;
53 case DRM_MODE_CONNECTOR_DisplayPort:
54 if (connector->mst.dp)
55 return false;
56 intel_dp = intel_attached_dp(connector);
57
58 if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
59 return false;
60
61 break;
62 default:
63 return false;
64 }
65
66 return info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
67 }
68
intel_vrr_is_in_range(struct intel_connector * connector,int vrefresh)69 bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
70 {
71 const struct drm_display_info *info = &connector->base.display_info;
72
73 return intel_vrr_is_capable(connector) &&
74 vrefresh >= info->monitor_range.min_vfreq &&
75 vrefresh <= info->monitor_range.max_vfreq;
76 }
77
intel_vrr_possible(const struct intel_crtc_state * crtc_state)78 bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
79 {
80 return crtc_state->vrr.flipline;
81 }
82
83 void
intel_vrr_check_modeset(struct intel_atomic_state * state)84 intel_vrr_check_modeset(struct intel_atomic_state *state)
85 {
86 int i;
87 struct intel_crtc_state *old_crtc_state, *new_crtc_state;
88 struct intel_crtc *crtc;
89
90 for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
91 new_crtc_state, i) {
92 if (new_crtc_state->uapi.vrr_enabled !=
93 old_crtc_state->uapi.vrr_enabled)
94 new_crtc_state->uapi.mode_changed = true;
95 }
96 }
97
static int intel_vrr_extra_vblank_delay(struct intel_display *display)
{
	/*
	 * On ICL/TGL the VRR hardware inserts one extra scanline just
	 * after vactive, which pushes the vmin decision boundary ahead
	 * accordingly and thus shrinks the maximum guardband length by
	 * one scanline.
	 */
	if (DISPLAY_VER(display) >= 13)
		return 0;

	return 1;
}
108
static int intel_vrr_vmin_flipline_offset(struct intel_display *display)
{
	/*
	 * ICL/TGL hardware imposes flipline >= vmin+1. Compensate by
	 * lowering the programmed vmin, so that flipline can stay at the
	 * original value and the frame keeps the desired minimum vtotal.
	 */
	if (DISPLAY_VER(display) >= 13)
		return 0;

	return 1;
}
121
intel_vrr_guardband_to_pipeline_full(const struct intel_crtc_state * crtc_state,int guardband)122 static int intel_vrr_guardband_to_pipeline_full(const struct intel_crtc_state *crtc_state,
123 int guardband)
124 {
125 /* hardware imposes one extra scanline somewhere */
126 return guardband - crtc_state->framestart_delay - 1;
127 }
128
intel_vrr_pipeline_full_to_guardband(const struct intel_crtc_state * crtc_state,int pipeline_full)129 static int intel_vrr_pipeline_full_to_guardband(const struct intel_crtc_state *crtc_state,
130 int pipeline_full)
131 {
132 /* hardware imposes one extra scanline somewhere */
133 return pipeline_full + crtc_state->framestart_delay + 1;
134 }
135
136 /*
137 * Without VRR registers get latched at:
138 * vblank_start
139 *
140 * With VRR the earliest registers can get latched is:
141 * intel_vrr_vmin_vblank_start(), which if we want to maintain
142 * the correct min vtotal is >=vblank_start+1
143 *
144 * The latest point registers can get latched is the vmax decision boundary:
145 * intel_vrr_vmax_vblank_start()
146 *
147 * Between those two points the vblank exit starts (and hence registers get
148 * latched) ASAP after a push is sent.
149 *
150 * framestart_delay is programmable 1-4.
151 */
152
intel_vrr_vmin_vtotal(const struct intel_crtc_state * crtc_state)153 int intel_vrr_vmin_vtotal(const struct intel_crtc_state *crtc_state)
154 {
155 /* Min vblank actually determined by flipline */
156 return crtc_state->vrr.vmin;
157 }
158
intel_vrr_vmax_vtotal(const struct intel_crtc_state * crtc_state)159 int intel_vrr_vmax_vtotal(const struct intel_crtc_state *crtc_state)
160 {
161 return crtc_state->vrr.vmax;
162 }
163
intel_vrr_vmin_vblank_start(const struct intel_crtc_state * crtc_state)164 int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
165 {
166 return intel_vrr_vmin_vtotal(crtc_state) - crtc_state->vrr.guardband;
167 }
168
intel_vrr_vmax_vblank_start(const struct intel_crtc_state * crtc_state)169 int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
170 {
171 return intel_vrr_vmax_vtotal(crtc_state) - crtc_state->vrr.guardband;
172 }
173
/*
 * Check whether the fractional CMRR divider is needed: i.e. whether the
 * refresh rate achievable with integer timings deviates from the desired
 * vrefresh by more than the tolerance.
 *
 * NOTE: currently always returns false via the "|| true" below.
 */
static bool
is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	/* Avoid CMRR for now till we have VRR with fixed timings working */
	if (!HAS_CMRR(display) || true)
		return false;

	/* refresh rates scaled by FIXED_POINT_PRECISION (1/100 Hz units) */
	actual_refresh_k =
		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
	pixel_clock_per_line =
		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
	calculated_refresh_k =
		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;

	/* close enough: no fractional divider required */
	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
		return false;

	return true;
}
197
/*
 * Compute the vtotal needed for CMRR (content matched refresh rate) and
 * fill out the CMRR M/N values in the crtc state.
 *
 * @video_mode_required selects the NTSC-style 1000/1001 fractional
 * adjustment of the refresh rate.
 *
 * Returns the computed vtotal in scanlines.
 */
static unsigned int
cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
{
	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
	u64 adjusted_pixel_rate;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);

	if (video_mode_required) {
		multiplier_m = 1001;
		multiplier_n = 1000;
	}

	/* cmrr_n = desired refresh rate * htotal * N */
	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
					      multiplier_n);
	/* vtotal = ceil(pixel rate * N / cmrr_n) */
	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
				  crtc_state->cmrr.cmrr_n);
	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
	/* do_div() returns the remainder: cmrr_m = (pixel rate * M) % cmrr_n */
	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);

	return vtotal;
}
221
222 static
intel_vrr_compute_cmrr_timings(struct intel_crtc_state * crtc_state)223 void intel_vrr_compute_cmrr_timings(struct intel_crtc_state *crtc_state)
224 {
225 /*
226 * TODO: Compute precise target refresh rate to determine
227 * if video_mode_required should be true. Currently set to
228 * false due to uncertainty about the precise target
229 * refresh Rate.
230 */
231 crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
232 crtc_state->vrr.vmin = crtc_state->vrr.vmax;
233 crtc_state->vrr.flipline = crtc_state->vrr.vmin;
234
235 crtc_state->cmrr.enable = true;
236 crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
237 }
238
239 static
intel_vrr_compute_vrr_timings(struct intel_crtc_state * crtc_state,int vmin,int vmax)240 void intel_vrr_compute_vrr_timings(struct intel_crtc_state *crtc_state,
241 int vmin, int vmax)
242 {
243 crtc_state->vrr.vmax = vmax;
244 crtc_state->vrr.vmin = vmin;
245 crtc_state->vrr.flipline = crtc_state->vrr.vmin;
246
247 crtc_state->vrr.enable = true;
248 crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
249 }
250
251 static
intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state * crtc_state)252 void intel_vrr_compute_fixed_rr_timings(struct intel_crtc_state *crtc_state)
253 {
254 /* For fixed rr, vmin = vmax = flipline */
255 crtc_state->vrr.vmax = crtc_state->hw.adjusted_mode.crtc_vtotal;
256 crtc_state->vrr.vmin = crtc_state->vrr.vmax;
257 crtc_state->vrr.flipline = crtc_state->vrr.vmin;
258 }
259
intel_vrr_hw_value(const struct intel_crtc_state * crtc_state,int value)260 static int intel_vrr_hw_value(const struct intel_crtc_state *crtc_state,
261 int value)
262 {
263 struct intel_display *display = to_intel_display(crtc_state);
264
265 /*
266 * On TGL vmin/vmax/flipline also need to be
267 * adjusted by the SCL to maintain correct vtotals.
268 */
269 if (DISPLAY_VER(display) >= 13)
270 return value;
271 else
272 return value - crtc_state->set_context_latency;
273 }
274
intel_vrr_vblank_start(const struct intel_crtc_state * crtc_state,int vmin_vmax)275 static int intel_vrr_vblank_start(const struct intel_crtc_state *crtc_state,
276 int vmin_vmax)
277 {
278 return intel_vrr_hw_value(crtc_state, vmin_vmax) - crtc_state->vrr.guardband;
279 }
280
281 /*
282 * For fixed refresh rate mode Vmin, Vmax and Flipline all are set to
283 * Vtotal value.
284 */
285 static
intel_vrr_fixed_rr_hw_vtotal(const struct intel_crtc_state * crtc_state)286 int intel_vrr_fixed_rr_hw_vtotal(const struct intel_crtc_state *crtc_state)
287 {
288 return intel_vrr_hw_value(crtc_state, crtc_state->hw.adjusted_mode.crtc_vtotal);
289 }
290
/* Fixed refresh rate: vmax == vtotal. */
static
int intel_vrr_fixed_rr_hw_vmax(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state);
}
296
/* Fixed refresh rate: vmin == vtotal, minus the ICL/TGL flipline offset. */
static
int intel_vrr_fixed_rr_hw_vmin(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	int hw_vtotal = intel_vrr_fixed_rr_hw_vtotal(crtc_state);

	/* compensate for the flipline >= vmin+1 hardware limit */
	return hw_vtotal - intel_vrr_vmin_flipline_offset(display);
}
305
/* Fixed refresh rate: flipline == vtotal. */
static
int intel_vrr_fixed_rr_hw_flipline(const struct intel_crtc_state *crtc_state)
{
	return intel_vrr_fixed_rr_hw_vtotal(crtc_state);
}
311
/*
 * Program the fixed refresh rate timings (vmin == vmax == flipline ==
 * vtotal) into the VRR transcoder registers. The registers are 0 based,
 * hence the -1 on each value.
 */
void intel_vrr_set_fixed_rr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!intel_vrr_possible(crtc_state))
		return;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_fixed_rr_hw_flipline(crtc_state) - 1);
}
327
328 static
intel_vrr_compute_vmin(struct intel_crtc_state * crtc_state)329 int intel_vrr_compute_vmin(struct intel_crtc_state *crtc_state)
330 {
331 /*
332 * To make fixed rr and vrr work seamless the guardband/pipeline full
333 * should be set such that it satisfies both the fixed and variable
334 * timings.
335 * For this set the vmin as crtc_vtotal. With this we never need to
336 * change anything to do with the guardband.
337 */
338 return crtc_state->hw.adjusted_mode.crtc_vtotal;
339 }
340
341 static
intel_vrr_compute_vmax(struct intel_connector * connector,const struct drm_display_mode * adjusted_mode)342 int intel_vrr_compute_vmax(struct intel_connector *connector,
343 const struct drm_display_mode *adjusted_mode)
344 {
345 const struct drm_display_info *info = &connector->base.display_info;
346 int vmax;
347
348 vmax = adjusted_mode->crtc_clock * 1000 /
349 (adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
350 vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);
351
352 return vmax;
353 }
354
intel_vrr_dc_balance_possible(const struct intel_crtc_state * crtc_state)355 static bool intel_vrr_dc_balance_possible(const struct intel_crtc_state *crtc_state)
356 {
357 struct intel_display *display = to_intel_display(crtc_state);
358 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
359 enum pipe pipe = crtc->pipe;
360
361 /*
362 * FIXME: Currently Firmware supports DC Balancing on PIPE A
363 * and PIPE B. Account those limitation while computing DC
364 * Balance parameters.
365 */
366 return (HAS_VRR_DC_BALANCE(display) &&
367 ((pipe == PIPE_A) || (pipe == PIPE_B)));
368 }
369
/*
 * Compute the DC Balance parameters. At runtime the DMC adjusts
 * vmax/flipline, within the limits computed here, to balance the
 * charge accumulated in the panel.
 */
static void
intel_vrr_dc_balance_compute_config(struct intel_crtc_state *crtc_state)
{
	int guardband_usec, adjustment_usec;
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;

	if (!intel_vrr_dc_balance_possible(crtc_state) || !crtc_state->vrr.enable)
		return;

	crtc_state->vrr.dc_balance.vmax = crtc_state->vrr.vmax;
	crtc_state->vrr.dc_balance.vmin = crtc_state->vrr.vmin;
	/* allow corrections over the full vmin..vmax range, both ways */
	crtc_state->vrr.dc_balance.max_increase =
		crtc_state->vrr.vmax - crtc_state->vrr.vmin;
	crtc_state->vrr.dc_balance.max_decrease =
		crtc_state->vrr.vmax - crtc_state->vrr.vmin;
	/* balance guardband = DCB_CORRECTION_SENSITIVITY percent of vmax */
	crtc_state->vrr.dc_balance.guardband =
		DIV_ROUND_UP(crtc_state->vrr.dc_balance.vmax *
			     DCB_CORRECTION_SENSITIVITY, 100);
	guardband_usec =
		intel_scanlines_to_usecs(adjusted_mode,
					 crtc_state->vrr.dc_balance.guardband);
	/*
	 * The correction_aggressiveness/100 is the number of milliseconds to
	 * adjust by when the balance is at twice the guardband.
	 * guardband_slope = correction_aggressiveness / (guardband * 100)
	 */
	adjustment_usec = DCB_CORRECTION_AGGRESSIVENESS * 10;
	crtc_state->vrr.dc_balance.slope =
		DIV_ROUND_UP(adjustment_usec, guardband_usec);
	/* aim for DCB_BLANK_TARGET percent of the vmin..vmax range */
	crtc_state->vrr.dc_balance.vblank_target =
		DIV_ROUND_UP((crtc_state->vrr.vmax - crtc_state->vrr.vmin) *
			     DCB_BLANK_TARGET, 100);
	crtc_state->vrr.dc_balance.enable = true;
}
404
/*
 * Compute the VRR/CMRR/fixed refresh rate timings for @crtc_state:
 * true VRR when userspace enabled it and a usable vmin..vmax range
 * exists, CMRR for eDP when a fractional refresh rate is required,
 * and fixed refresh rate timings otherwise.
 */
void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	bool is_edp = intel_dp_is_edp(intel_dp);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	int vmin, vmax;

	if (!HAS_VRR(display))
		return;

	/* no VRR with interlaced modes */
	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));

	/*
	 * Allow fixed refresh rate with VRR Timing Generator.
	 * For now set the vrr.in_range to 0, to allow fixed_rr but skip actual
	 * VRR and LRR.
	 * #TODO For actual VRR with joiner, we need to figure out how to
	 * correctly sequence transcoder level stuff vs. pipe level stuff
	 * in the commit.
	 */
	if (crtc_state->joiner_pipes)
		crtc_state->vrr.in_range = false;

	vmin = intel_vrr_compute_vmin(crtc_state);

	if (crtc_state->vrr.in_range) {
		if (HAS_LRR(display))
			crtc_state->update_lrr = true;
		vmax = intel_vrr_compute_vmax(connector, adjusted_mode);
	} else {
		/* out of range: only the fixed refresh rate is usable */
		vmax = vmin;
	}

	if (crtc_state->uapi.vrr_enabled && vmin < vmax)
		intel_vrr_compute_vrr_timings(crtc_state, vmin, vmax);
	else if (is_cmrr_frac_required(crtc_state) && is_edp)
		intel_vrr_compute_cmrr_timings(crtc_state);
	else
		intel_vrr_compute_fixed_rr_timings(crtc_state);

	/* vsync position measured back from vtotal, for the AS SDP */
	if (HAS_AS_SDP(display)) {
		crtc_state->vrr.vsync_start =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.crtc_vsync_start);
		crtc_state->vrr.vsync_end =
			(crtc_state->hw.adjusted_mode.crtc_vtotal -
			 crtc_state->hw.adjusted_mode.crtc_vsync_end);
	}

	intel_vrr_dc_balance_compute_config(crtc_state);
}
465
466 static int
intel_vrr_max_hw_guardband(const struct intel_crtc_state * crtc_state)467 intel_vrr_max_hw_guardband(const struct intel_crtc_state *crtc_state)
468 {
469 struct intel_display *display = to_intel_display(crtc_state);
470 int max_pipeline_full = REG_FIELD_MAX(VRR_CTL_PIPELINE_FULL_MASK);
471
472 if (DISPLAY_VER(display) >= 13)
473 return REG_FIELD_MAX(XELPD_VRR_CTL_VRR_GUARDBAND_MASK);
474 else
475 return intel_vrr_pipeline_full_to_guardband(crtc_state,
476 max_pipeline_full);
477 }
478
479 static int
intel_vrr_max_vblank_guardband(const struct intel_crtc_state * crtc_state)480 intel_vrr_max_vblank_guardband(const struct intel_crtc_state *crtc_state)
481 {
482 struct intel_display *display = to_intel_display(crtc_state);
483 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
484
485 return crtc_state->vrr.vmin -
486 adjusted_mode->crtc_vdisplay -
487 crtc_state->set_context_latency -
488 intel_vrr_extra_vblank_delay(display);
489 }
490
/* Overall guardband limit: hardware fields and vblank length. */
static int
intel_vrr_max_guardband(struct intel_crtc_state *crtc_state)
{
	int hw_max = intel_vrr_max_hw_guardband(crtc_state);
	int vblank_max = intel_vrr_max_vblank_guardband(crtc_state);

	return min(hw_max, vblank_max);
}
497
/*
 * Compute the smallest guardband (in scanlines) that still leaves
 * enough time for the worst case plane prefill, and on DP/eDP also
 * for PSR exit and SDP transmission.
 */
static
int intel_vrr_compute_optimized_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct skl_prefill_ctx prefill_ctx;
	int prefill_latency_us;
	int guardband = 0;

	/* worst case prefill parameters for this crtc */
	skl_prefill_init_worst(&prefill_ctx, crtc_state);

	/*
	 * The SoC power controller runs SAGV mutually exclusive with package C states,
	 * so the max of package C and SAGV latencies is used to compute the min prefill guardband.
	 * PM delay = max(sagv_latency, pkgc_max_latency (highest enabled wm level 1 and up))
	 */
	prefill_latency_us = max(display->sagv.block_time_us,
				 skl_watermark_max_latency(display, 1));

	guardband = skl_prefill_min_guardband(&prefill_ctx,
					      crtc_state,
					      prefill_latency_us);

	/* DP/eDP additionally needs room for PSR exit and SDP transmission */
	if (intel_crtc_has_dp_encoder(crtc_state)) {
		guardband = max(guardband, intel_psr_min_guardband(crtc_state));
		guardband = max(guardband, intel_dp_sdp_min_guardband(crtc_state, true));
	}

	return guardband;
}
527
intel_vrr_use_optimized_guardband(const struct intel_crtc_state * crtc_state)528 static bool intel_vrr_use_optimized_guardband(const struct intel_crtc_state *crtc_state)
529 {
530 /*
531 * #TODO: Enable optimized guardband for HDMI
532 * For HDMI lot of infoframes are transmitted a line or two after vsync.
533 * Since with optimized guardband the double bufferring point is at delayed vblank,
534 * we need to ensure that vsync happens after delayed vblank for the HDMI case.
535 */
536 if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
537 return false;
538
539 return true;
540 }
541
/*
 * Compute the VRR guardband (and for pre-xelpd the PIPELINE_FULL
 * value), clamped to what the hardware and the vblank length allow.
 */
void intel_vrr_compute_guardband(struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	struct drm_display_mode *pipe_mode = &crtc_state->hw.pipe_mode;
	int guardband;

	if (!intel_vrr_possible(crtc_state))
		return;

	if (intel_vrr_use_optimized_guardband(crtc_state))
		guardband = intel_vrr_compute_optimized_guardband(crtc_state);
	else
		/* fall back to the full vmin vblank length */
		guardband = crtc_state->vrr.vmin - adjusted_mode->crtc_vdisplay;

	crtc_state->vrr.guardband = min(guardband, intel_vrr_max_guardband(crtc_state));

	if (intel_vrr_always_use_vrr_tg(display)) {
		/* vblank effectively starts one guardband before vtotal */
		adjusted_mode->crtc_vblank_start =
			adjusted_mode->crtc_vtotal - crtc_state->vrr.guardband;
		/*
		 * pipe_mode has already been derived from the
		 * original adjusted_mode, keep the two in sync.
		 */
		pipe_mode->crtc_vblank_start =
			adjusted_mode->crtc_vblank_start;
	}

	/* older hardware expresses the guardband via PIPELINE_FULL */
	if (DISPLAY_VER(display) < 13)
		crtc_state->vrr.pipeline_full =
			intel_vrr_guardband_to_pipeline_full(crtc_state,
							     crtc_state->vrr.guardband);
}
575
trans_vrr_ctl(const struct intel_crtc_state * crtc_state)576 static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
577 {
578 struct intel_display *display = to_intel_display(crtc_state);
579
580 if (DISPLAY_VER(display) >= 14)
581 return VRR_CTL_FLIP_LINE_EN |
582 XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
583 else if (DISPLAY_VER(display) >= 13)
584 return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
585 XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
586 else
587 return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
588 VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
589 VRR_CTL_PIPELINE_FULL_OVERRIDE;
590 }
591
/*
 * Program the transcoder level VRR registers during the modeset enable
 * sequence. Must be called after TRANS_DDI_FUNC_CTL has been enabled
 * and before TRANS_CONF (see the Bspec note below).
 */
void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!HAS_VRR(display))
		return;

	/*
	 * Bspec says:
	 * "(note: VRR needs to be programmed after
	 *  TRANS_DDI_FUNC_CTL and before TRANS_CONF)."
	 *
	 * In practice it turns out that ICL can hang if
	 * TRANS_VRR_VMAX/FLIPLINE are written before
	 * enabling TRANS_DDI_FUNC_CTL.
	 */
	drm_WARN_ON(display->drm,
		    !(intel_de_read(display, TRANS_DDI_FUNC_CTL(display, cpu_transcoder)) & TRANS_DDI_FUNC_ENABLE));

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(display, 12, 13))
		intel_de_rmw(display, CHICKEN_TRANS(display, cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	/* no VRR timings computed: keep the VRR timing generator off */
	if (!intel_vrr_possible(crtc_state)) {
		intel_de_write(display,
			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
		return;
	}

	/* 64 bit CMRR M/N values, split across HI/LO register pairs */
	if (crtc_state->cmrr.enable) {
		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_m));
		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
			       upper_32_bits(crtc_state->cmrr.cmrr_n));
		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
			       lower_32_bits(crtc_state->cmrr.cmrr_n));
	}

	intel_vrr_set_fixed_rr_timings(crtc_state);

	if (!intel_vrr_always_use_vrr_tg(display))
		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
			       trans_vrr_ctl(crtc_state));

	if (HAS_AS_SDP(display))
		intel_de_write(display,
			       TRANS_VRR_VSYNC(display, cpu_transcoder),
			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));

	/*
	 * For BMG and LNL+ onwards the EMP_AS_SDP_TL is used for programming
	 * double buffering point and transmission line for VRR packets for
	 * HDMI2.1/DP/eDP/DP->HDMI2.1 PCON.
	 * Since currently we support VRR only for DP/eDP, so this is programmed
	 * to for Adaptive Sync SDP to Vsync start.
	 */
	if (DISPLAY_VERx100(display) == 1401 || DISPLAY_VER(display) >= 20)
		intel_de_write(display,
			       EMP_AS_SDP_TL(display, cpu_transcoder),
			       EMP_AS_SDP_DB_TL(crtc_state->vrr.vsync_start));
}
662
663 void
intel_vrr_dcb_increment_flip_count(struct intel_crtc_state * crtc_state,struct intel_crtc * crtc)664 intel_vrr_dcb_increment_flip_count(struct intel_crtc_state *crtc_state,
665 struct intel_crtc *crtc)
666 {
667 struct intel_display *display = to_intel_display(crtc_state);
668 enum pipe pipe = crtc->pipe;
669
670 if (!crtc_state->vrr.dc_balance.enable)
671 return;
672
673 intel_de_write(display, PIPEDMC_DCB_FLIP_COUNT(pipe),
674 ++crtc->dc_balance.flip_count);
675 }
676
677 void
intel_vrr_dcb_reset(const struct intel_crtc_state * old_crtc_state,struct intel_crtc * crtc)678 intel_vrr_dcb_reset(const struct intel_crtc_state *old_crtc_state,
679 struct intel_crtc *crtc)
680 {
681 struct intel_display *display = to_intel_display(old_crtc_state);
682 enum pipe pipe = crtc->pipe;
683
684 if (!old_crtc_state->vrr.dc_balance.enable)
685 return;
686
687 intel_de_write(display, PIPEDMC_DCB_FLIP_COUNT(pipe), 0);
688 intel_de_write(display, PIPEDMC_DCB_BALANCE_RESET(pipe), 0);
689 }
690
/*
 * Send a "push" to the transcoder, asking the VRR timing generator to
 * terminate the current frame. Can be executed either via MMIO
 * (@dsb == NULL) or from a DSB, in which case the write is issued
 * non-posted.
 */
void intel_vrr_send_push(struct intel_dsb *dsb,
			 const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	if (dsb)
		intel_dsb_nonpost_start(dsb);

	intel_de_write_dsb(display, dsb,
			   TRANS_PUSH(display, cpu_transcoder),
			   TRANS_PUSH_EN | TRANS_PUSH_SEND);

	if (dsb)
		intel_dsb_nonpost_end(dsb);
}
710
/*
 * Verify that a previously sent push has been consumed by the hardware.
 * When executed from a DSB this arms a poll that raises the DSB poll
 * error interrupt on failure; via MMIO a kernel error is logged instead.
 */
void intel_vrr_check_push_sent(struct intel_dsb *dsb,
			       const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	/*
	 * Make sure the push send bit has cleared. This should
	 * already be the case as long as the caller makes sure
	 * this is called after the delayed vblank has occurred.
	 */
	if (dsb) {
		int wait_us, count;

		/* the bit is expected to have cleared already: poll once */
		wait_us = 2;
		count = 1;

		/*
		 * If the bit hasn't cleared the DSB will
		 * raise the poll error interrupt.
		 */
		intel_dsb_poll(dsb, TRANS_PUSH(display, cpu_transcoder),
			       TRANS_PUSH_SEND, 0, wait_us, count);
	} else {
		if (intel_vrr_is_push_sent(crtc_state))
			drm_err(display->drm, "[CRTC:%d:%s] VRR push send still pending\n",
				crtc->base.base.id, crtc->base.name);
	}
}
744
intel_vrr_is_push_sent(const struct intel_crtc_state * crtc_state)745 bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
746 {
747 struct intel_display *display = to_intel_display(crtc_state);
748 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
749
750 if (!crtc_state->vrr.enable)
751 return false;
752
753 return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
754 }
755
intel_vrr_always_use_vrr_tg(struct intel_display * display)756 bool intel_vrr_always_use_vrr_tg(struct intel_display *display)
757 {
758 if (!HAS_VRR(display))
759 return false;
760
761 if (DISPLAY_VER(display) >= 30)
762 return true;
763
764 return false;
765 }
766
intel_vrr_hw_vmin(const struct intel_crtc_state * crtc_state)767 static int intel_vrr_hw_vmin(const struct intel_crtc_state *crtc_state)
768 {
769 struct intel_display *display = to_intel_display(crtc_state);
770
771 return intel_vrr_hw_value(crtc_state, crtc_state->vrr.vmin) -
772 intel_vrr_vmin_flipline_offset(display);
773 }
774
intel_vrr_hw_vmax(const struct intel_crtc_state * crtc_state)775 static int intel_vrr_hw_vmax(const struct intel_crtc_state *crtc_state)
776 {
777 return intel_vrr_hw_value(crtc_state, crtc_state->vrr.vmax);
778 }
779
intel_vrr_hw_flipline(const struct intel_crtc_state * crtc_state)780 static int intel_vrr_hw_flipline(const struct intel_crtc_state *crtc_state)
781 {
782 return intel_vrr_hw_value(crtc_state, crtc_state->vrr.flipline);
783 }
784
/*
 * Program the variable refresh rate vmin/vmax/flipline values into the
 * transcoder registers. The registers are 0 based, hence the -1 on
 * each value.
 */
static void intel_vrr_set_vrr_timings(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
		       intel_vrr_hw_vmin(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
		       intel_vrr_hw_vmax(crtc_state) - 1);
	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
		       intel_vrr_hw_flipline(crtc_state) - 1);
}
797
/*
 * Enable DC Balance: program the transcoder DCB registers (both the
 * CFG and the LIVE copies), hand the computed parameters to the pipe
 * DMC, and finally enable the DMC driven vmax/flipline adjustment in
 * TRANS_VRR_CTL. Scanline registers are 0 based, hence the -1.
 */
static void
intel_vrr_enable_dc_balancing(const struct intel_crtc_state *crtc_state)
{
	struct intel_display *display = to_intel_display(crtc_state);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	enum pipe pipe = crtc->pipe;
	u32 vrr_ctl = intel_de_read(display, TRANS_VRR_CTL(display, cpu_transcoder));

	if (!crtc_state->vrr.dc_balance.enable)
		return;

	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG(cpu_transcoder),
		       VRR_DCB_ADJ_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder),
		       VRR_DCB_ADJ_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_VMAX(cpu_transcoder),
		       VRR_DCB_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder),
		       VRR_DCB_VMAX(crtc_state->vrr.vmax - 1));
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE(cpu_transcoder),
		       VRR_DCB_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder),
		       VRR_DCB_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder),
		       VRR_DCB_ADJ_FLIPLINE(crtc_state->vrr.flipline - 1));
	intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG(cpu_transcoder),
		       VRR_DCB_ADJ_FLIPLINE(crtc_state->vrr.flipline - 1));
	/* parameters computed by intel_vrr_dc_balance_compute_config() */
	intel_de_write(display, PIPEDMC_DCB_VMIN(pipe),
		       crtc_state->vrr.dc_balance.vmin - 1);
	intel_de_write(display, PIPEDMC_DCB_VMAX(pipe),
		       crtc_state->vrr.dc_balance.vmax - 1);
	intel_de_write(display, PIPEDMC_DCB_MAX_INCREASE(pipe),
		       crtc_state->vrr.dc_balance.max_increase);
	intel_de_write(display, PIPEDMC_DCB_MAX_DECREASE(pipe),
		       crtc_state->vrr.dc_balance.max_decrease);
	intel_de_write(display, PIPEDMC_DCB_GUARDBAND(pipe),
		       crtc_state->vrr.dc_balance.guardband);
	intel_de_write(display, PIPEDMC_DCB_SLOPE(pipe),
		       crtc_state->vrr.dc_balance.slope);
	intel_de_write(display, PIPEDMC_DCB_VBLANK(pipe),
		       crtc_state->vrr.dc_balance.vblank_target);
	intel_dmc_configure_dc_balance_event(display, pipe, true);
	intel_de_write(display, TRANS_ADAPTIVE_SYNC_DCB_CTL(cpu_transcoder),
		       ADAPTIVE_SYNC_COUNTER_EN);
	intel_pipedmc_dcb_enable(NULL, crtc);

	vrr_ctl |= VRR_CTL_DCB_ADJ_ENABLE;
	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
}
848
849 static void
intel_vrr_disable_dc_balancing(const struct intel_crtc_state * old_crtc_state)850 intel_vrr_disable_dc_balancing(const struct intel_crtc_state *old_crtc_state)
851 {
852 struct intel_display *display = to_intel_display(old_crtc_state);
853 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
854 struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
855 enum pipe pipe = crtc->pipe;
856 u32 vrr_ctl = intel_de_read(display, TRANS_VRR_CTL(display, cpu_transcoder));
857
858 if (!old_crtc_state->vrr.dc_balance.enable)
859 return;
860
861 intel_pipedmc_dcb_disable(NULL, crtc);
862 intel_dmc_configure_dc_balance_event(display, pipe, false);
863 intel_de_write(display, TRANS_ADAPTIVE_SYNC_DCB_CTL(cpu_transcoder), 0);
864 intel_de_write(display, PIPEDMC_DCB_VMIN(pipe), 0);
865 intel_de_write(display, PIPEDMC_DCB_VMAX(pipe), 0);
866 intel_de_write(display, PIPEDMC_DCB_MAX_INCREASE(pipe), 0);
867 intel_de_write(display, PIPEDMC_DCB_MAX_DECREASE(pipe), 0);
868 intel_de_write(display, PIPEDMC_DCB_GUARDBAND(pipe), 0);
869 intel_de_write(display, PIPEDMC_DCB_SLOPE(pipe), 0);
870 intel_de_write(display, PIPEDMC_DCB_VBLANK(pipe), 0);
871 intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder), 0);
872 intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder), 0);
873 intel_de_write(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder), 0);
874 intel_de_write(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder), 0);
875 intel_de_write(display, TRANS_VRR_DCB_ADJ_VMAX_CFG(cpu_transcoder), 0);
876 intel_de_write(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG(cpu_transcoder), 0);
877 intel_de_write(display, TRANS_VRR_DCB_VMAX(cpu_transcoder), 0);
878 intel_de_write(display, TRANS_VRR_DCB_FLIPLINE(cpu_transcoder), 0);
879
880 vrr_ctl &= ~VRR_CTL_DCB_ADJ_ENABLE;
881 intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
882 }
883
intel_vrr_tg_enable(const struct intel_crtc_state * crtc_state,bool cmrr_enable)884 static void intel_vrr_tg_enable(const struct intel_crtc_state *crtc_state,
885 bool cmrr_enable)
886 {
887 struct intel_display *display = to_intel_display(crtc_state);
888 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
889 u32 vrr_ctl;
890
891 intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), TRANS_PUSH_EN);
892
893 vrr_ctl = VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state);
894
895 /*
896 * FIXME this might be broken as bspec seems to imply that
897 * even VRR_CTL_CMRR_ENABLE is armed by TRANS_CMRR_N_HI
898 * when enabling CMRR (but not when disabling CMRR?).
899 */
900 if (cmrr_enable)
901 vrr_ctl |= VRR_CTL_CMRR_ENABLE;
902
903 intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder), vrr_ctl);
904 }
905
intel_vrr_tg_disable(const struct intel_crtc_state * old_crtc_state)906 static void intel_vrr_tg_disable(const struct intel_crtc_state *old_crtc_state)
907 {
908 struct intel_display *display = to_intel_display(old_crtc_state);
909 enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
910
911 intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
912 trans_vrr_ctl(old_crtc_state));
913
914 if (intel_de_wait_for_clear_ms(display,
915 TRANS_VRR_STATUS(display, cpu_transcoder),
916 VRR_STATUS_VRR_EN_LIVE, 1000))
917 drm_err(display->drm, "Timed out waiting for VRR live status to clear\n");
918
919 intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
920 }
921
intel_vrr_enable(const struct intel_crtc_state * crtc_state)922 void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
923 {
924 struct intel_display *display = to_intel_display(crtc_state);
925
926 if (!crtc_state->vrr.enable)
927 return;
928
929 intel_vrr_set_vrr_timings(crtc_state);
930 intel_vrr_enable_dc_balancing(crtc_state);
931
932 if (!intel_vrr_always_use_vrr_tg(display))
933 intel_vrr_tg_enable(crtc_state, crtc_state->cmrr.enable);
934 }
935
intel_vrr_disable(const struct intel_crtc_state * old_crtc_state)936 void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
937 {
938 struct intel_display *display = to_intel_display(old_crtc_state);
939
940 if (!old_crtc_state->vrr.enable)
941 return;
942
943 if (!intel_vrr_always_use_vrr_tg(display))
944 intel_vrr_tg_disable(old_crtc_state);
945
946 intel_vrr_disable_dc_balancing(old_crtc_state);
947 intel_vrr_set_fixed_rr_timings(old_crtc_state);
948 }
949
intel_vrr_transcoder_enable(const struct intel_crtc_state * crtc_state)950 void intel_vrr_transcoder_enable(const struct intel_crtc_state *crtc_state)
951 {
952 struct intel_display *display = to_intel_display(crtc_state);
953
954 intel_vrr_set_transcoder_timings(crtc_state);
955
956 if (!intel_vrr_possible(crtc_state))
957 return;
958
959 if (intel_vrr_always_use_vrr_tg(display))
960 intel_vrr_tg_enable(crtc_state, false);
961 }
962
intel_vrr_transcoder_disable(const struct intel_crtc_state * old_crtc_state)963 void intel_vrr_transcoder_disable(const struct intel_crtc_state *old_crtc_state)
964 {
965 struct intel_display *display = to_intel_display(old_crtc_state);
966
967 if (!intel_vrr_possible(old_crtc_state))
968 return;
969
970 if (intel_vrr_always_use_vrr_tg(display))
971 intel_vrr_tg_disable(old_crtc_state);
972 }
973
intel_vrr_is_fixed_rr(const struct intel_crtc_state * crtc_state)974 bool intel_vrr_is_fixed_rr(const struct intel_crtc_state *crtc_state)
975 {
976 return crtc_state->vrr.flipline &&
977 crtc_state->vrr.flipline == crtc_state->vrr.vmax &&
978 crtc_state->vrr.flipline == crtc_state->vrr.vmin;
979 }
980
981 static
intel_vrr_get_dc_balance_config(struct intel_crtc_state * crtc_state)982 void intel_vrr_get_dc_balance_config(struct intel_crtc_state *crtc_state)
983 {
984 u32 reg_val;
985 struct intel_display *display = to_intel_display(crtc_state);
986 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
987 enum pipe pipe = crtc->pipe;
988
989 if (!intel_vrr_dc_balance_possible(crtc_state))
990 return;
991
992 reg_val = intel_de_read(display, PIPEDMC_DCB_VMIN(pipe));
993 crtc_state->vrr.dc_balance.vmin = reg_val ? reg_val + 1 : 0;
994
995 reg_val = intel_de_read(display, PIPEDMC_DCB_VMAX(pipe));
996 crtc_state->vrr.dc_balance.vmax = reg_val ? reg_val + 1 : 0;
997
998 crtc_state->vrr.dc_balance.guardband =
999 intel_de_read(display, PIPEDMC_DCB_GUARDBAND(pipe));
1000 crtc_state->vrr.dc_balance.max_increase =
1001 intel_de_read(display, PIPEDMC_DCB_MAX_INCREASE(pipe));
1002 crtc_state->vrr.dc_balance.max_decrease =
1003 intel_de_read(display, PIPEDMC_DCB_MAX_DECREASE(pipe));
1004 crtc_state->vrr.dc_balance.slope =
1005 intel_de_read(display, PIPEDMC_DCB_SLOPE(pipe));
1006 crtc_state->vrr.dc_balance.vblank_target =
1007 intel_de_read(display, PIPEDMC_DCB_VBLANK(pipe));
1008 }
1009
intel_vrr_get_config(struct intel_crtc_state * crtc_state)1010 void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
1011 {
1012 struct intel_display *display = to_intel_display(crtc_state);
1013 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1014 u32 trans_vrr_ctl, trans_vrr_vsync;
1015 bool vrr_enable;
1016
1017 trans_vrr_ctl = intel_de_read(display,
1018 TRANS_VRR_CTL(display, cpu_transcoder));
1019
1020 if (HAS_CMRR(display))
1021 crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);
1022
1023 if (crtc_state->cmrr.enable) {
1024 crtc_state->cmrr.cmrr_n =
1025 intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
1026 TRANS_CMRR_N_HI(display, cpu_transcoder));
1027 crtc_state->cmrr.cmrr_m =
1028 intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
1029 TRANS_CMRR_M_HI(display, cpu_transcoder));
1030 }
1031
1032 if (DISPLAY_VER(display) >= 13) {
1033 crtc_state->vrr.guardband =
1034 REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
1035 } else {
1036 if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE) {
1037 crtc_state->vrr.pipeline_full =
1038 REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);
1039
1040 crtc_state->vrr.guardband =
1041 intel_vrr_pipeline_full_to_guardband(crtc_state,
1042 crtc_state->vrr.pipeline_full);
1043 }
1044 }
1045
1046 if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
1047 crtc_state->vrr.flipline = intel_de_read(display,
1048 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
1049 crtc_state->vrr.vmax = intel_de_read(display,
1050 TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
1051 crtc_state->vrr.vmin = intel_de_read(display,
1052 TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;
1053
1054 if (DISPLAY_VER(display) < 13) {
1055 /* undo what intel_vrr_hw_value() does when writing the values */
1056 crtc_state->vrr.flipline += crtc_state->set_context_latency;
1057 crtc_state->vrr.vmax += crtc_state->set_context_latency;
1058 crtc_state->vrr.vmin += crtc_state->set_context_latency;
1059
1060 crtc_state->vrr.vmin += intel_vrr_vmin_flipline_offset(display);
1061 }
1062
1063 /*
1064 * For platforms that always use VRR Timing Generator, the VTOTAL.Vtotal
1065 * bits are not filled. Since for these platforms TRAN_VMIN is always
1066 * filled with crtc_vtotal, use TRAN_VRR_VMIN to get the vtotal for
1067 * adjusted_mode.
1068 */
1069 if (intel_vrr_always_use_vrr_tg(display))
1070 crtc_state->hw.adjusted_mode.crtc_vtotal =
1071 intel_vrr_vmin_vtotal(crtc_state);
1072
1073 if (HAS_AS_SDP(display)) {
1074 trans_vrr_vsync =
1075 intel_de_read(display,
1076 TRANS_VRR_VSYNC(display, cpu_transcoder));
1077 crtc_state->vrr.vsync_start =
1078 REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
1079 crtc_state->vrr.vsync_end =
1080 REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
1081 }
1082 }
1083
1084 vrr_enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
1085
1086 if (intel_vrr_always_use_vrr_tg(display))
1087 crtc_state->vrr.enable = vrr_enable && !intel_vrr_is_fixed_rr(crtc_state);
1088 else
1089 crtc_state->vrr.enable = vrr_enable;
1090
1091 intel_vrr_get_dc_balance_config(crtc_state);
1092
1093 /*
1094 * #TODO: For Both VRR and CMRR the flag I915_MODE_FLAG_VRR is set for mode_flags.
1095 * Since CMRR is currently disabled, set this flag for VRR for now.
1096 * Need to keep this in mind while re-enabling CMRR.
1097 */
1098 if (crtc_state->vrr.enable)
1099 crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
1100
1101 /*
1102 * For platforms that always use the VRR timing generator, we overwrite
1103 * crtc_vblank_start with vtotal - guardband to reflect the delayed
1104 * vblank start. This works for both default and optimized guardband values.
1105 * On other platforms, we keep the original value from
1106 * intel_get_transcoder_timings() and apply adjustments only in VRR-specific
1107 * paths as needed.
1108 */
1109 if (intel_vrr_always_use_vrr_tg(display))
1110 crtc_state->hw.adjusted_mode.crtc_vblank_start =
1111 crtc_state->hw.adjusted_mode.crtc_vtotal -
1112 crtc_state->vrr.guardband;
1113 }
1114
intel_vrr_safe_window_start(const struct intel_crtc_state * crtc_state)1115 int intel_vrr_safe_window_start(const struct intel_crtc_state *crtc_state)
1116 {
1117 struct intel_display *display = to_intel_display(crtc_state);
1118
1119 if (DISPLAY_VER(display) >= 30)
1120 return crtc_state->hw.adjusted_mode.crtc_vdisplay -
1121 crtc_state->set_context_latency;
1122 else
1123 return crtc_state->hw.adjusted_mode.crtc_vdisplay;
1124 }
1125
/*
 * Prefer the "next frame" adjusted vmin vblank start; fall back to the
 * final value when no adjustment is pending.
 *
 * Read the live register only once: the original called
 * intel_vrr_dcb_vmin_vblank_start_next() twice, and the LIVE register
 * contents may change between the validity check and the use.
 */
static int
intel_vrr_dcb_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	int vblank_start = intel_vrr_dcb_vmin_vblank_start_next(crtc_state);

	if (vblank_start < 0)
		return intel_vrr_dcb_vmin_vblank_start_final(crtc_state);

	return vblank_start;
}
1133
intel_vrr_vmin_safe_window_end(const struct intel_crtc_state * crtc_state)1134 int intel_vrr_vmin_safe_window_end(const struct intel_crtc_state *crtc_state)
1135 {
1136 int vmin_vblank_start = crtc_state->vrr.dc_balance.enable ?
1137 intel_vrr_dcb_vmin_vblank_start(crtc_state) :
1138 intel_vrr_vmin_vblank_start(crtc_state);
1139
1140 return vmin_vblank_start - crtc_state->set_context_latency;
1141 }
1142
intel_vrr_dcb_vmin_vblank_start_next(const struct intel_crtc_state * crtc_state)1143 int intel_vrr_dcb_vmin_vblank_start_next(const struct intel_crtc_state *crtc_state)
1144 {
1145 struct intel_display *display = to_intel_display(crtc_state);
1146 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1147 u32 tmp = 0;
1148
1149 tmp = intel_de_read(display, TRANS_VRR_DCB_ADJ_FLIPLINE_CFG_LIVE(cpu_transcoder));
1150
1151 if (REG_FIELD_GET(VRR_DCB_ADJ_FLIPLINE_CNT_MASK, tmp) == 0)
1152 return -EINVAL;
1153
1154 return intel_vrr_vblank_start(crtc_state, VRR_DCB_ADJ_FLIPLINE(tmp) + 1);
1155 }
1156
intel_vrr_dcb_vmax_vblank_start_next(const struct intel_crtc_state * crtc_state)1157 int intel_vrr_dcb_vmax_vblank_start_next(const struct intel_crtc_state *crtc_state)
1158 {
1159 struct intel_display *display = to_intel_display(crtc_state);
1160 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1161 u32 tmp = 0;
1162
1163 tmp = intel_de_read(display, TRANS_VRR_DCB_ADJ_VMAX_CFG_LIVE(cpu_transcoder));
1164
1165 if (REG_FIELD_GET(VRR_DCB_ADJ_VMAX_CNT_MASK, tmp) == 0)
1166 return -EINVAL;
1167
1168 return intel_vrr_vblank_start(crtc_state, VRR_DCB_ADJ_VMAX(tmp) + 1);
1169 }
1170
intel_vrr_dcb_vmin_vblank_start_final(const struct intel_crtc_state * crtc_state)1171 int intel_vrr_dcb_vmin_vblank_start_final(const struct intel_crtc_state *crtc_state)
1172 {
1173 struct intel_display *display = to_intel_display(crtc_state);
1174 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1175 u32 tmp = 0;
1176
1177 tmp = intel_de_read(display, TRANS_VRR_DCB_FLIPLINE_LIVE(cpu_transcoder));
1178
1179 return intel_vrr_vblank_start(crtc_state, VRR_DCB_FLIPLINE(tmp) + 1);
1180 }
1181
intel_vrr_dcb_vmax_vblank_start_final(const struct intel_crtc_state * crtc_state)1182 int intel_vrr_dcb_vmax_vblank_start_final(const struct intel_crtc_state *crtc_state)
1183 {
1184 struct intel_display *display = to_intel_display(crtc_state);
1185 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1186 u32 tmp = 0;
1187
1188 tmp = intel_de_read(display, TRANS_VRR_DCB_VMAX_LIVE(cpu_transcoder));
1189
1190 return intel_vrr_vblank_start(crtc_state, VRR_DCB_VMAX(tmp) + 1);
1191 }
1192