// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 *
 */

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_de.h"
#include "intel_display_types.h"
#include "intel_vrr.h"

bool intel_vrr_is_capable(struct intel_connector *connector)
{
	const struct drm_display_info *info = &connector->base.display_info;
	struct drm_i915_private *i915 = to_i915(connector->base.dev);
	struct intel_dp *intel_dp;

	/*
	 * The DP sink is capable of VRR video timings if the
	 * Ignore MSA bit is set in the DPCD.
	 * The EDID monitor range (max - min vfreq) should also be at least
	 * 10 Hz for a reasonable Adaptive Sync / Variable Refresh Rate
	 * end user experience.
	 */
	switch (connector->base.connector_type) {
	case DRM_MODE_CONNECTOR_eDP:
		if (!connector->panel.vbt.vrr)
			return false;
		fallthrough;
	case DRM_MODE_CONNECTOR_DisplayPort:
		intel_dp = intel_attached_dp(connector);

		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
			return false;

		break;
	default:
		return false;
	}

	return HAS_VRR(i915) &&
		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
}

bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
{
	const struct drm_display_info *info = &connector->base.display_info;

	return intel_vrr_is_capable(connector) &&
		vrefresh >= info->monitor_range.min_vfreq &&
		vrefresh <= info->monitor_range.max_vfreq;
}

void
intel_vrr_check_modeset(struct intel_atomic_state *state)
{
	int i;
	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
	struct intel_crtc *crtc;

	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
					    new_crtc_state, i) {
		if (new_crtc_state->uapi.vrr_enabled !=
		    old_crtc_state->uapi.vrr_enabled)
			new_crtc_state->uapi.mode_changed = true;
	}
}

/*
 * Without VRR, registers get latched at:
 *  vblank_start
 *
 * With VRR, the earliest point registers can get latched is:
 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 *  the correct min vtotal, is >= vblank_start + 1
 *
 * The latest point registers can get latched is the vmax decision boundary:
 *  intel_vrr_vmax_vblank_start()
 *
 * Between those two points the vblank exit starts (and hence registers get
 * latched) ASAP after a push is sent.
 *
 * framestart_delay is programmable 1-4.
 */
static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);

	if (DISPLAY_VER(i915) >= 13)
		return crtc_state->vrr.guardband;
	else
		/* The hw imposes the extra scanline before frame start */
		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
}

int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
{
	/* Min vblank actually determined by flipline, which is always >= vmin + 1 */
	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
}

int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
{
	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
}
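
/*
 * Worked example (illustrative numbers only, not taken from bspec or any
 * particular platform): on a DISPLAY_VER >= 13 part with vrr.vmin = 1124,
 * vrr.vmax = 1406 and a guardband of 45 scanlines, registers can latch no
 * earlier than scanline 1124 + 1 - 45 = 1080 and no later than
 * 1406 - 45 = 1361.
 */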

void
intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
			 struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
	struct intel_connector *connector =
		to_intel_connector(conn_state->connector);
	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
	const struct drm_display_info *info = &connector->base.display_info;
	int vmin, vmax;

	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
		return;

	crtc_state->vrr.in_range =
		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
	if (!crtc_state->vrr.in_range)
		return;

	if (HAS_LRR(i915))
		crtc_state->update_lrr = true;

	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
	vmax = adjusted_mode->crtc_clock * 1000 /
		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);

	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);

	if (vmin >= vmax)
		return;

	/*
	 * flipline determines the min vblank length the hardware will
	 * generate, and flipline >= vmin + 1, hence we reduce vmin by one
	 * to make sure we can get the actual min vblank length.
	 */
	crtc_state->vrr.vmin = vmin - 1;
	crtc_state->vrr.vmax = vmax;

	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;
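
	/*
	 * Worked example (illustrative numbers only, not from any particular
	 * panel): a 1920x1080@60 mode with crtc_clock = 148500,
	 * crtc_htotal = 2200, crtc_vtotal = 1125 and a 48-60 Hz monitor range
	 * gives vmin = DIV_ROUND_UP(148500000, 2200 * 60) = 1125 and
	 * vmax = 148500000 / (2200 * 48) = 1406, hence vrr.vmin = 1124,
	 * vrr.vmax = 1406 and vrr.flipline = 1125.
	 */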

	/*
	 * For XE_LPD+ we use the guardband; the pipeline full override
	 * is deprecated.
	 */
	if (DISPLAY_VER(i915) >= 13) {
		crtc_state->vrr.guardband =
			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
	} else {
		crtc_state->vrr.pipeline_full =
			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
			    crtc_state->framestart_delay - 1);
	}

	if (crtc_state->uapi.vrr_enabled) {
		crtc_state->vrr.enable = true;
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
	}
}

static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = to_i915(crtc_state->uapi.crtc->dev);

	if (DISPLAY_VER(i915) >= 13)
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
	else
		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
			VRR_CTL_PIPELINE_FULL_OVERRIDE;
}

void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	/*
	 * This bit seems to have two meanings depending on the platform:
	 * TGL: generate VRR "safe window" for DSB vblank waits
	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
	 */
	if (IS_DISPLAY_VER(dev_priv, 12, 13))
		intel_de_rmw(dev_priv, CHICKEN_TRANS(cpu_transcoder),
			     0, PIPE_VBLANK_WITH_DELAY);

	if (!crtc_state->vrr.flipline) {
		intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder), 0);
		return;
	}

	intel_de_write(dev_priv, TRANS_VRR_VMIN(cpu_transcoder), crtc_state->vrr.vmin - 1);
	intel_de_write(dev_priv, TRANS_VRR_VMAX(cpu_transcoder), crtc_state->vrr.vmax - 1);
	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder), trans_vrr_ctl(crtc_state));
	intel_de_write(dev_priv, TRANS_VRR_FLIPLINE(cpu_transcoder), crtc_state->vrr.flipline - 1);
}
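
/*
 * Note that the VMIN/VMAX/FLIPLINE registers are programmed with the value
 * minus one, and intel_vrr_get_config() adds the one back on readout. With
 * the illustrative numbers above (assumed values, not from bspec),
 * vrr.vmin = 1124 is written as 1123, vrr.vmax = 1406 as 1405 and
 * vrr.flipline = 1125 as 1124.
 */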

void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder),
		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
}

bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return false;

	return intel_de_read(dev_priv, TRANS_PUSH(cpu_transcoder)) & TRANS_PUSH_SEND;
}

void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (!crtc_state->vrr.enable)
		return;

	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder), TRANS_PUSH_EN);
	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder),
		       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
}

void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;

	if (!old_crtc_state->vrr.enable)
		return;

	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder),
		       trans_vrr_ctl(old_crtc_state));
	intel_de_wait_for_clear(dev_priv, TRANS_VRR_STATUS(cpu_transcoder),
				VRR_STATUS_VRR_EN_LIVE, 1000);
	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder), 0);
}

void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 trans_vrr_ctl;

	trans_vrr_ctl = intel_de_read(dev_priv, TRANS_VRR_CTL(cpu_transcoder));

	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;

	if (DISPLAY_VER(dev_priv) >= 13)
		crtc_state->vrr.guardband =
			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
	else if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
		crtc_state->vrr.pipeline_full =
			REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);

	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
		crtc_state->vrr.flipline = intel_de_read(dev_priv, TRANS_VRR_FLIPLINE(cpu_transcoder)) + 1;
		crtc_state->vrr.vmax = intel_de_read(dev_priv, TRANS_VRR_VMAX(cpu_transcoder)) + 1;
		crtc_state->vrr.vmin = intel_de_read(dev_priv, TRANS_VRR_VMIN(cpu_transcoder)) + 1;
	}

	if (crtc_state->vrr.enable)
		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
}