1 /*
2 * Copyright © 2014 Intel Corporation
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21 * DEALINGS IN THE SOFTWARE.
22 */
23
24 #include <drm/drm_atomic_helper.h>
25 #include <drm/drm_damage_helper.h>
26 #include <drm/drm_debugfs.h>
27
28 #include "i915_drv.h"
29 #include "i915_reg.h"
30 #include "intel_alpm.h"
31 #include "intel_atomic.h"
32 #include "intel_crtc.h"
33 #include "intel_cursor_regs.h"
34 #include "intel_ddi.h"
35 #include "intel_de.h"
36 #include "intel_display_types.h"
37 #include "intel_dp.h"
38 #include "intel_dp_aux.h"
39 #include "intel_frontbuffer.h"
40 #include "intel_hdmi.h"
41 #include "intel_psr.h"
42 #include "intel_psr_regs.h"
43 #include "intel_snps_phy.h"
44 #include "skl_universal_plane.h"
45
46 /**
47 * DOC: Panel Self Refresh (PSR/SRD)
48 *
49 * Since Haswell the display controller supports Panel Self-Refresh on display
50 * panels which have a remote frame buffer (RFB) implemented according to the
51 * PSR spec in eDP 1.3. The PSR feature allows the display to go to lower
52 * standby states when the system is idle but the display is on, as it
53 * eliminates display refresh requests to DDR memory completely as long as the
54 * frame buffer for that display is unchanged.
55 *
56 * Panel Self Refresh must be supported by both Hardware (source) and
57 * Panel (sink).
58 *
59 * PSR saves power by caching the framebuffer in the panel RFB, which allows us
60 * to power down the link and memory controller. For DSI panels the same idea
61 * is called "manual mode".
62 *
63 * The implementation uses the hardware-based PSR support which automatically
64 * enters/exits self-refresh mode. The hardware takes care of sending the
65 * required DP aux message and could even retrain the link (that part isn't
66 * enabled yet though). The hardware also keeps track of any frontbuffer
67 * changes to know when to exit self-refresh mode again. Unfortunately that
68 * part doesn't work too well, which is why the i915 PSR support uses
69 * software frontbuffer tracking to make sure it doesn't miss a screen
70 * update. For this integration intel_psr_invalidate() and intel_psr_flush()
71 * get called by the frontbuffer tracking code. Note that because of locking
72 * issues the self-refresh re-enable code is done from a work queue, which
73 * must be correctly synchronized/cancelled when shutting down the pipe.
74 *
75 * DC3CO (DC3 clock off)
76 *
77 * On top of PSR2, GEN12 adds an intermediate power saving state that turns the
78 * clock off automatically during the PSR2 idle state.
79 * The smaller overhead of DC3co entry/exit vs. the overhead of PSR2 deep sleep
80 * entry/exit allows the HW to enter a low-power state even when page flipping
81 * periodically (for instance a 30fps video playback scenario).
82 *
83 * Every time a flip occurs PSR2 will get out of deep sleep state (if it was
84 * in it), DC3CO is enabled and tgl_dc3co_disable_work is scheduled to run
85 * after 6 frames. If no other flip occurs and that function is executed,
86 * DC3CO is disabled and PSR2 is configured to enter deep sleep again,
87 * resetting the cycle in case of another flip.
88 * Front buffer modifications do not trigger DC3CO activation on purpose, as it
89 * would bring a lot of complexity and most modern systems will only
90 * use page flips.
91 */
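/*
 * A rough sketch of the frontbuffer tracking integration described above
 * (illustrative only; the exact call chain and signatures live in
 * intel_frontbuffer.c):
 *
 *	CPU/GTT write to a frontbuffer object
 *	  -> frontbuffer invalidate hook
 *	       -> intel_psr_invalidate()	(PSR exits / stays inactive)
 *	  writes complete, buffer flushed
 *	  -> frontbuffer flush hook
 *	       -> intel_psr_flush()		(schedules psr.work to re-activate PSR)
 */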
92
93 /*
94 * Description of PSR mask bits:
95 *
96 * EDP_PSR_DEBUG[16]/EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw-skl):
97 *
98 * When unmasked (nearly) all display register writes (eg. even
99 * SWF) trigger a PSR exit. Some registers are excluded from this
100 * and they have a more specific mask (described below). On icl+
101 * this bit no longer exists and is effectively always set.
102 *
103 * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+):
104 *
105 * When unmasked (nearly) all pipe/plane register writes
106 * trigger a PSR exit. Some plane registers are excluded from this
107 * and they have a more specific mask (described below).
108 *
109 * CHICKEN_PIPESL_1[11]/SKL_PSR_MASK_PLANE_FLIP (skl+):
110 * PIPE_MISC[23]/PIPE_MISC_PSR_MASK_PRIMARY_FLIP (bdw):
111 * EDP_PSR_DEBUG[23]/EDP_PSR_DEBUG_MASK_PRIMARY_FLIP (hsw):
112 *
113 * When unmasked PRI_SURF/PLANE_SURF writes trigger a PSR exit.
114 * SPR_SURF/CURBASE are not included in this and instead are
115 * controlled by PIPE_MISC_PSR_MASK_PIPE_REG_WRITE (skl+) or
116 * EDP_PSR_DEBUG_MASK_DISP_REG_WRITE (hsw/bdw).
117 *
118 * PIPE_MISC[22]/PIPE_MISC_PSR_MASK_SPRITE_ENABLE (bdw):
119 * EDP_PSR_DEBUG[21]/EDP_PSR_DEBUG_MASK_SPRITE_ENABLE (hsw):
120 *
121 * When unmasked PSR is blocked as long as the sprite
122 * plane is enabled. skl+ with their universal planes no
123 * longer have a mask bit like this, and no plane being
124 * enabled blocks PSR.
125 *
126 * PIPE_MISC[21]/PIPE_MISC_PSR_MASK_CURSOR_MOVE (bdw):
127 * EDP_PSR_DEBUG[20]/EDP_PSR_DEBUG_MASK_CURSOR_MOVE (hsw):
128 *
129 * When unmasked CURPOS writes trigger a PSR exit. On skl+
130 * this doesn't exist but CURPOS is included in the
131 * PIPE_MISC_PSR_MASK_PIPE_REG_WRITE mask.
132 *
133 * PIPE_MISC[20]/PIPE_MISC_PSR_MASK_VBLANK_VSYNC_INT (bdw+):
134 * EDP_PSR_DEBUG[19]/EDP_PSR_DEBUG_MASK_VBLANK_VSYNC_INT (hsw):
135 *
136 * When unmasked PSR is blocked as long as vblank and/or vsync
137 * interrupt is unmasked in IMR *and* enabled in IER.
138 *
139 * CHICKEN_TRANS[30]/SKL_UNMASK_VBL_TO_PIPE_IN_SRD (skl+):
140 * CHICKEN_PAR1_1[15]/HSW_MASK_VBL_TO_PIPE_IN_SRD (hsw/bdw):
141 *
142 * Selects whether PSR exit generates an extra vblank before
143 * the first frame is transmitted. Also note the opposite polarity
144 * of the bit on hsw/bdw vs. skl+ (masked==generate the extra vblank,
145 * unmasked==do not generate the extra vblank).
146 *
147 * With DC states enabled the extra vblank happens after link training,
148 * with DC states disabled it happens immediately upon PSR exit trigger.
149 * No idea as of now why there is a difference. HSW/BDW (which don't
150 * even have DMC) always generate it after link training. Go figure.
151 *
152 * Unfortunately CHICKEN_TRANS itself seems to be double buffered
153 * and thus won't latch until the first vblank. So with DC states
154 * enabled the register effectively uses the reset value during DC5
155 * exit+PSR exit sequence, and thus the bit does nothing until
156 * latched by the vblank that it was trying to prevent from being
157 * generated in the first place. So we should probably call this
158 * one a chicken/egg bit instead on skl+.
159 *
160 * In standby mode (as opposed to link-off) this makes no difference
161 * as the timing generator keeps running the whole time generating
162 * normal periodic vblanks.
163 *
164 * WaPsrDPAMaskVBlankInSRD asks us to set the bit on hsw/bdw,
165 * and doing so makes the behaviour match the skl+ reset value.
166 *
167 * CHICKEN_PIPESL_1[0]/BDW_UNMASK_VBL_TO_REGS_IN_SRD (bdw):
168 * CHICKEN_PIPESL_1[15]/HSW_UNMASK_VBL_TO_REGS_IN_SRD (hsw):
169 *
170 * On BDW without this bit set no vblanks whatsoever are
171 * generated after PSR exit. On HSW this has no apparent effect.
172 * WaPsrDPRSUnmaskVBlankInSRD says to set this.
173 *
174 * The rest of the bits are more self-explanatory and/or
175 * irrelevant for normal operation.
176 *
177 * Description of intel_crtc_state variables has_psr, has_panel_replay and
178 * has_sel_update:
179 *
180 * has_psr (alone): PSR1
181 * has_psr + has_sel_update: PSR2
182 * has_psr + has_panel_replay: Panel Replay
183 * has_psr + has_panel_replay + has_sel_update: Panel Replay Selective Update
184 *
185 * Description of some intel_psr variables: enabled, panel_replay_enabled,
186 * sel_update_enabled
187 *
188 * enabled (alone): PSR1
189 * enabled + sel_update_enabled: PSR2
190 * enabled + panel_replay_enabled: Panel Replay
191 * enabled + panel_replay_enabled + sel_update_enabled: Panel Replay SU
192 */
193
194 #define CAN_PSR(intel_dp) ((intel_dp)->psr.sink_support && \
195 (intel_dp)->psr.source_support)
196
197 bool intel_encoder_can_psr(struct intel_encoder *encoder)
198 {
199 if (intel_encoder_is_dp(encoder) || encoder->type == INTEL_OUTPUT_DP_MST)
200 return CAN_PSR(enc_to_intel_dp(encoder)) ||
201 CAN_PANEL_REPLAY(enc_to_intel_dp(encoder));
202 else
203 return false;
204 }
205
206 bool intel_psr_needs_aux_io_power(struct intel_encoder *encoder,
207 const struct intel_crtc_state *crtc_state)
208 {
209 /*
210 * For PSR/PR modes only eDP requires the AUX IO power to be enabled whenever
211 * the output is enabled. For non-eDP outputs the main link is always
212 * on, hence it doesn't require the HW initiated AUX wake-up signaling used
213 * for eDP.
214 *
215 * TODO:
216 * - Consider leaving AUX IO disabled for eDP / PR as well, in case
217 * the ALPM with main-link off mode is not enabled.
218 * - Leave AUX IO enabled for DP / PR, once support for ALPM with
219 * main-link off mode is added for it and this mode gets enabled.
220 */
221 return intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP) &&
222 intel_encoder_can_psr(encoder);
223 }
224
225 static bool psr_global_enabled(struct intel_dp *intel_dp)
226 {
227 struct intel_display *display = to_intel_display(intel_dp);
228 struct intel_connector *connector = intel_dp->attached_connector;
229
230 switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
231 case I915_PSR_DEBUG_DEFAULT:
232 if (display->params.enable_psr == -1)
233 return connector->panel.vbt.psr.enable;
234 return display->params.enable_psr;
235 case I915_PSR_DEBUG_DISABLE:
236 return false;
237 default:
238 return true;
239 }
240 }
241
242 static bool psr2_global_enabled(struct intel_dp *intel_dp)
243 {
244 struct intel_display *display = to_intel_display(intel_dp);
245
246 switch (intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK) {
247 case I915_PSR_DEBUG_DISABLE:
248 case I915_PSR_DEBUG_FORCE_PSR1:
249 return false;
250 default:
251 if (display->params.enable_psr == 1)
252 return false;
253 return true;
254 }
255 }
256
257 static bool psr2_su_region_et_global_enabled(struct intel_dp *intel_dp)
258 {
259 struct intel_display *display = to_intel_display(intel_dp);
260
261 if (display->params.enable_psr != -1)
262 return false;
263
264 return true;
265 }
266
267 static bool panel_replay_global_enabled(struct intel_dp *intel_dp)
268 {
269 struct intel_display *display = to_intel_display(intel_dp);
270
271 if ((display->params.enable_psr != -1) ||
272 (intel_dp->psr.debug & I915_PSR_DEBUG_PANEL_REPLAY_DISABLE))
273 return false;
274 return true;
275 }
276
277 static u32 psr_irq_psr_error_bit_get(struct intel_dp *intel_dp)
278 {
279 struct intel_display *display = to_intel_display(intel_dp);
280
281 return DISPLAY_VER(display) >= 12 ? TGL_PSR_ERROR :
282 EDP_PSR_ERROR(intel_dp->psr.transcoder);
283 }
284
285 static u32 psr_irq_post_exit_bit_get(struct intel_dp *intel_dp)
286 {
287 struct intel_display *display = to_intel_display(intel_dp);
288
289 return DISPLAY_VER(display) >= 12 ? TGL_PSR_POST_EXIT :
290 EDP_PSR_POST_EXIT(intel_dp->psr.transcoder);
291 }
292
293 static u32 psr_irq_pre_entry_bit_get(struct intel_dp *intel_dp)
294 {
295 struct intel_display *display = to_intel_display(intel_dp);
296
297 return DISPLAY_VER(display) >= 12 ? TGL_PSR_PRE_ENTRY :
298 EDP_PSR_PRE_ENTRY(intel_dp->psr.transcoder);
299 }
300
301 static u32 psr_irq_mask_get(struct intel_dp *intel_dp)
302 {
303 struct intel_display *display = to_intel_display(intel_dp);
304
305 return DISPLAY_VER(display) >= 12 ? TGL_PSR_MASK :
306 EDP_PSR_MASK(intel_dp->psr.transcoder);
307 }
308
309 static i915_reg_t psr_ctl_reg(struct intel_display *display,
310 enum transcoder cpu_transcoder)
311 {
312 if (DISPLAY_VER(display) >= 8)
313 return EDP_PSR_CTL(display, cpu_transcoder);
314 else
315 return HSW_SRD_CTL;
316 }
317
318 static i915_reg_t psr_debug_reg(struct intel_display *display,
319 enum transcoder cpu_transcoder)
320 {
321 if (DISPLAY_VER(display) >= 8)
322 return EDP_PSR_DEBUG(display, cpu_transcoder);
323 else
324 return HSW_SRD_DEBUG;
325 }
326
327 static i915_reg_t psr_perf_cnt_reg(struct intel_display *display,
328 enum transcoder cpu_transcoder)
329 {
330 if (DISPLAY_VER(display) >= 8)
331 return EDP_PSR_PERF_CNT(display, cpu_transcoder);
332 else
333 return HSW_SRD_PERF_CNT;
334 }
335
336 static i915_reg_t psr_status_reg(struct intel_display *display,
337 enum transcoder cpu_transcoder)
338 {
339 if (DISPLAY_VER(display) >= 8)
340 return EDP_PSR_STATUS(display, cpu_transcoder);
341 else
342 return HSW_SRD_STATUS;
343 }
344
345 static i915_reg_t psr_imr_reg(struct intel_display *display,
346 enum transcoder cpu_transcoder)
347 {
348 if (DISPLAY_VER(display) >= 12)
349 return TRANS_PSR_IMR(display, cpu_transcoder);
350 else
351 return EDP_PSR_IMR;
352 }
353
354 static i915_reg_t psr_iir_reg(struct intel_display *display,
355 enum transcoder cpu_transcoder)
356 {
357 if (DISPLAY_VER(display) >= 12)
358 return TRANS_PSR_IIR(display, cpu_transcoder);
359 else
360 return EDP_PSR_IIR;
361 }
362
363 static i915_reg_t psr_aux_ctl_reg(struct intel_display *display,
364 enum transcoder cpu_transcoder)
365 {
366 if (DISPLAY_VER(display) >= 8)
367 return EDP_PSR_AUX_CTL(display, cpu_transcoder);
368 else
369 return HSW_SRD_AUX_CTL;
370 }
371
372 static i915_reg_t psr_aux_data_reg(struct intel_display *display,
373 enum transcoder cpu_transcoder, int i)
374 {
375 if (DISPLAY_VER(display) >= 8)
376 return EDP_PSR_AUX_DATA(display, cpu_transcoder, i);
377 else
378 return HSW_SRD_AUX_DATA(i);
379 }
380
381 static void psr_irq_control(struct intel_dp *intel_dp)
382 {
383 struct intel_display *display = to_intel_display(intel_dp);
384 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
385 u32 mask;
386
387 if (intel_dp->psr.panel_replay_enabled)
388 return;
389
390 mask = psr_irq_psr_error_bit_get(intel_dp);
391 if (intel_dp->psr.debug & I915_PSR_DEBUG_IRQ)
392 mask |= psr_irq_post_exit_bit_get(intel_dp) |
393 psr_irq_pre_entry_bit_get(intel_dp);
394
395 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
396 psr_irq_mask_get(intel_dp), ~mask);
397 }
398
399 static void psr_event_print(struct intel_display *display,
400 u32 val, bool sel_update_enabled)
401 {
402 drm_dbg_kms(display->drm, "PSR exit events: 0x%x\n", val);
403 if (val & PSR_EVENT_PSR2_WD_TIMER_EXPIRE)
404 drm_dbg_kms(display->drm, "\tPSR2 watchdog timer expired\n");
405 if ((val & PSR_EVENT_PSR2_DISABLED) && sel_update_enabled)
406 drm_dbg_kms(display->drm, "\tPSR2 disabled\n");
407 if (val & PSR_EVENT_SU_DIRTY_FIFO_UNDERRUN)
408 drm_dbg_kms(display->drm, "\tSU dirty FIFO underrun\n");
409 if (val & PSR_EVENT_SU_CRC_FIFO_UNDERRUN)
410 drm_dbg_kms(display->drm, "\tSU CRC FIFO underrun\n");
411 if (val & PSR_EVENT_GRAPHICS_RESET)
412 drm_dbg_kms(display->drm, "\tGraphics reset\n");
413 if (val & PSR_EVENT_PCH_INTERRUPT)
414 drm_dbg_kms(display->drm, "\tPCH interrupt\n");
415 if (val & PSR_EVENT_MEMORY_UP)
416 drm_dbg_kms(display->drm, "\tMemory up\n");
417 if (val & PSR_EVENT_FRONT_BUFFER_MODIFY)
418 drm_dbg_kms(display->drm, "\tFront buffer modification\n");
419 if (val & PSR_EVENT_WD_TIMER_EXPIRE)
420 drm_dbg_kms(display->drm, "\tPSR watchdog timer expired\n");
421 if (val & PSR_EVENT_PIPE_REGISTERS_UPDATE)
422 drm_dbg_kms(display->drm, "\tPIPE registers updated\n");
423 if (val & PSR_EVENT_REGISTER_UPDATE)
424 drm_dbg_kms(display->drm, "\tRegister updated\n");
425 if (val & PSR_EVENT_HDCP_ENABLE)
426 drm_dbg_kms(display->drm, "\tHDCP enabled\n");
427 if (val & PSR_EVENT_KVMR_SESSION_ENABLE)
428 drm_dbg_kms(display->drm, "\tKVMR session enabled\n");
429 if (val & PSR_EVENT_VBI_ENABLE)
430 drm_dbg_kms(display->drm, "\tVBI enabled\n");
431 if (val & PSR_EVENT_LPSP_MODE_EXIT)
432 drm_dbg_kms(display->drm, "\tLPSP mode exited\n");
433 if ((val & PSR_EVENT_PSR_DISABLE) && !sel_update_enabled)
434 drm_dbg_kms(display->drm, "\tPSR disabled\n");
435 }
436
437 void intel_psr_irq_handler(struct intel_dp *intel_dp, u32 psr_iir)
438 {
439 struct intel_display *display = to_intel_display(intel_dp);
440 struct drm_i915_private *dev_priv = to_i915(display->drm);
441 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
442 ktime_t time_ns = ktime_get();
443
444 if (psr_iir & psr_irq_pre_entry_bit_get(intel_dp)) {
445 intel_dp->psr.last_entry_attempt = time_ns;
446 drm_dbg_kms(display->drm,
447 "[transcoder %s] PSR entry attempt in 2 vblanks\n",
448 transcoder_name(cpu_transcoder));
449 }
450
451 if (psr_iir & psr_irq_post_exit_bit_get(intel_dp)) {
452 intel_dp->psr.last_exit = time_ns;
453 drm_dbg_kms(display->drm,
454 "[transcoder %s] PSR exit completed\n",
455 transcoder_name(cpu_transcoder));
456
457 if (DISPLAY_VER(display) >= 9) {
458 u32 val;
459
460 val = intel_de_rmw(dev_priv,
461 PSR_EVENT(dev_priv, cpu_transcoder),
462 0, 0);
463
464 psr_event_print(display, val, intel_dp->psr.sel_update_enabled);
465 }
466 }
467
468 if (psr_iir & psr_irq_psr_error_bit_get(intel_dp)) {
469 drm_warn(display->drm, "[transcoder %s] PSR aux error\n",
470 transcoder_name(cpu_transcoder));
471
472 intel_dp->psr.irq_aux_error = true;
473
474 /*
475 * If this interrupt is not masked it will keep
476 * firing so fast that it prevents the scheduled
477 * work from running.
478 * Also, after a PSR error we don't want to arm PSR
479 * again, so we don't care about unmasking the interrupt
480 * or clearing irq_aux_error.
481 */
482 intel_de_rmw(display, psr_imr_reg(display, cpu_transcoder),
483 0, psr_irq_psr_error_bit_get(intel_dp));
484
485 queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
486 }
487 }
488
489 static u8 intel_dp_get_sink_sync_latency(struct intel_dp *intel_dp)
490 {
491 struct intel_display *display = to_intel_display(intel_dp);
492 u8 val = 8; /* assume the worst if we can't read the value */
493
494 if (drm_dp_dpcd_readb(&intel_dp->aux,
495 DP_SYNCHRONIZATION_LATENCY_IN_SINK, &val) == 1)
496 val &= DP_MAX_RESYNC_FRAME_COUNT_MASK;
497 else
498 drm_dbg_kms(display->drm,
499 "Unable to get sink synchronization latency, assuming 8 frames\n");
500 return val;
501 }
502
503 static u8 intel_dp_get_su_capability(struct intel_dp *intel_dp)
504 {
505 u8 su_capability = 0;
506
507 if (intel_dp->psr.sink_panel_replay_su_support)
508 drm_dp_dpcd_readb(&intel_dp->aux,
509 DP_PANEL_PANEL_REPLAY_CAPABILITY,
510 &su_capability);
511 else
512 su_capability = intel_dp->psr_dpcd[1];
513
514 return su_capability;
515 }
516
517 static unsigned int
518 intel_dp_get_su_x_granularity_offset(struct intel_dp *intel_dp)
519 {
520 return intel_dp->psr.sink_panel_replay_su_support ?
521 DP_PANEL_PANEL_REPLAY_X_GRANULARITY :
522 DP_PSR2_SU_X_GRANULARITY;
523 }
524
525 static unsigned int
526 intel_dp_get_su_y_granularity_offset(struct intel_dp *intel_dp)
527 {
528 return intel_dp->psr.sink_panel_replay_su_support ?
529 DP_PANEL_PANEL_REPLAY_Y_GRANULARITY :
530 DP_PSR2_SU_Y_GRANULARITY;
531 }
532
533 /*
534 * Note: Bits related to granularity are the same in the panel replay and psr
535 * registers. Rely on the PSR definitions for these "common" bits.
536 */
537 static void intel_dp_get_su_granularity(struct intel_dp *intel_dp)
538 {
539 struct intel_display *display = to_intel_display(intel_dp);
540 ssize_t r;
541 u16 w;
542 u8 y;
543
544 /*
545 * TODO: Do we need to take into account panel supporting both PSR and
546 * Panel replay?
547 */
548
549 /*
550 * If the sink doesn't have specific granularity requirements, set the legacy
551 * ones.
552 */
553 if (!(intel_dp_get_su_capability(intel_dp) &
554 DP_PSR2_SU_GRANULARITY_REQUIRED)) {
555 /* As PSR2 HW sends full lines, we do not care about x granularity */
556 w = 4;
557 y = 4;
558 goto exit;
559 }
560
561 r = drm_dp_dpcd_read(&intel_dp->aux,
562 intel_dp_get_su_x_granularity_offset(intel_dp),
563 &w, 2);
564 if (r != 2)
565 drm_dbg_kms(display->drm,
566 "Unable to read selective update x granularity\n");
567 /*
568 * Spec says that if the value read is 0 the default granularity should
569 * be used instead.
570 */
571 if (r != 2 || w == 0)
572 w = 4;
573
574 r = drm_dp_dpcd_read(&intel_dp->aux,
575 intel_dp_get_su_y_granularity_offset(intel_dp),
576 &y, 1);
577 if (r != 1) {
578 drm_dbg_kms(display->drm,
579 "Unable to read selective update y granularity\n");
580 y = 4;
581 }
582 if (y == 0)
583 y = 1;
584
585 exit:
586 intel_dp->psr.su_w_granularity = w;
587 intel_dp->psr.su_y_granularity = y;
588 }
589
590 static void _panel_replay_init_dpcd(struct intel_dp *intel_dp)
591 {
592 struct intel_display *display = to_intel_display(intel_dp);
593
594 if (intel_dp_is_edp(intel_dp)) {
595 if (!intel_alpm_aux_less_wake_supported(intel_dp)) {
596 drm_dbg_kms(display->drm,
597 "Panel doesn't support AUX-less ALPM, eDP Panel Replay not possible\n");
598 return;
599 }
600
601 if (!(intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)) {
602 drm_dbg_kms(display->drm,
603 "Panel doesn't support early transport, eDP Panel Replay not possible\n");
604 return;
605 }
606 }
607
608 intel_dp->psr.sink_panel_replay_support = true;
609
610 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SU_SUPPORT)
611 intel_dp->psr.sink_panel_replay_su_support = true;
612
613 drm_dbg_kms(display->drm,
614 "Panel replay %sis supported by panel\n",
615 intel_dp->psr.sink_panel_replay_su_support ?
616 "selective_update " : "");
617 }
618
619 static void _psr_init_dpcd(struct intel_dp *intel_dp)
620 {
621 struct intel_display *display = to_intel_display(intel_dp);
622
623 drm_dbg_kms(display->drm, "eDP panel supports PSR version %x\n",
624 intel_dp->psr_dpcd[0]);
625
626 if (drm_dp_has_quirk(&intel_dp->desc, DP_DPCD_QUIRK_NO_PSR)) {
627 drm_dbg_kms(display->drm,
628 "PSR support not currently available for this panel\n");
629 return;
630 }
631
632 if (!(intel_dp->edp_dpcd[1] & DP_EDP_SET_POWER_CAP)) {
633 drm_dbg_kms(display->drm,
634 "Panel lacks power state control, PSR cannot be enabled\n");
635 return;
636 }
637
638 intel_dp->psr.sink_support = true;
639 intel_dp->psr.sink_sync_latency =
640 intel_dp_get_sink_sync_latency(intel_dp);
641
642 if (DISPLAY_VER(display) >= 9 &&
643 intel_dp->psr_dpcd[0] >= DP_PSR2_WITH_Y_COORD_IS_SUPPORTED) {
644 bool y_req = intel_dp->psr_dpcd[1] &
645 DP_PSR2_SU_Y_COORDINATE_REQUIRED;
646
647 /*
648 * All panels that support PSR version 03h (PSR2 +
649 * Y-coordinate) can handle Y-coordinates in VSC but we are
650 * only sure that it is going to be used when required by the
651 * panel. This way the panel is capable of doing selective
652 * updates without an aux frame sync.
653 *
654 * To support panels with PSR version 02h, or version 03h
655 * without the Y-coordinate requirement, we would need to
656 * enable GTC first.
657 */
658 intel_dp->psr.sink_psr2_support = y_req &&
659 intel_alpm_aux_wake_supported(intel_dp);
660 drm_dbg_kms(display->drm, "PSR2 %ssupported\n",
661 intel_dp->psr.sink_psr2_support ? "" : "not ");
662 }
663 }
664
665 void intel_psr_init_dpcd(struct intel_dp *intel_dp)
666 {
667 drm_dp_dpcd_read(&intel_dp->aux, DP_PSR_SUPPORT, intel_dp->psr_dpcd,
668 sizeof(intel_dp->psr_dpcd));
669 drm_dp_dpcd_readb(&intel_dp->aux, DP_PANEL_REPLAY_CAP,
670 &intel_dp->pr_dpcd);
671
672 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_SUPPORT)
673 _panel_replay_init_dpcd(intel_dp);
674
675 if (intel_dp->psr_dpcd[0])
676 _psr_init_dpcd(intel_dp);
677
678 if (intel_dp->psr.sink_psr2_support ||
679 intel_dp->psr.sink_panel_replay_su_support)
680 intel_dp_get_su_granularity(intel_dp);
681 }
682
683 static void hsw_psr_setup_aux(struct intel_dp *intel_dp)
684 {
685 struct intel_display *display = to_intel_display(intel_dp);
686 struct drm_i915_private *dev_priv = to_i915(display->drm);
687 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
688 u32 aux_clock_divider, aux_ctl;
689 /* write DP_SET_POWER=D0 */
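	/*
	 * The message below is a standard 5 byte DP AUX native write: byte 0
	 * holds the request type in the high nibble plus address bits 19:16,
	 * bytes 1-2 the remaining address bits, byte 3 the payload length
	 * minus one, and byte 4 the DP_SET_POWER_D0 payload itself. The loop
	 * below packs it into the SRD/PSR AUX data registers 4 bytes at a time.
	 */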
690 static const u8 aux_msg[] = {
691 [0] = (DP_AUX_NATIVE_WRITE << 4) | ((DP_SET_POWER >> 16) & 0xf),
692 [1] = (DP_SET_POWER >> 8) & 0xff,
693 [2] = DP_SET_POWER & 0xff,
694 [3] = 1 - 1,
695 [4] = DP_SET_POWER_D0,
696 };
697 int i;
698
699 BUILD_BUG_ON(sizeof(aux_msg) > 20);
700 for (i = 0; i < sizeof(aux_msg); i += 4)
701 intel_de_write(dev_priv,
702 psr_aux_data_reg(display, cpu_transcoder, i >> 2),
703 intel_dp_aux_pack(&aux_msg[i], sizeof(aux_msg) - i));
704
705 aux_clock_divider = intel_dp->get_aux_clock_divider(intel_dp, 0);
706
707 /* Start with bits set for DDI_AUX_CTL register */
708 aux_ctl = intel_dp->get_aux_send_ctl(intel_dp, sizeof(aux_msg),
709 aux_clock_divider);
710
711 /* Select only valid bits for SRD_AUX_CTL */
712 aux_ctl &= EDP_PSR_AUX_CTL_TIME_OUT_MASK |
713 EDP_PSR_AUX_CTL_MESSAGE_SIZE_MASK |
714 EDP_PSR_AUX_CTL_PRECHARGE_2US_MASK |
715 EDP_PSR_AUX_CTL_BIT_CLOCK_2X_MASK;
716
717 intel_de_write(display, psr_aux_ctl_reg(display, cpu_transcoder),
718 aux_ctl);
719 }
720
721 static bool psr2_su_region_et_valid(struct intel_dp *intel_dp, bool panel_replay)
722 {
723 struct intel_display *display = to_intel_display(intel_dp);
724
725 if (DISPLAY_VER(display) < 20 || !intel_dp_is_edp(intel_dp) ||
726 intel_dp->psr.debug & I915_PSR_DEBUG_SU_REGION_ET_DISABLE)
727 return false;
728
729 return panel_replay ?
730 intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT :
731 intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED &&
732 psr2_su_region_et_global_enabled(intel_dp);
733 }
734
735 static void _panel_replay_enable_sink(struct intel_dp *intel_dp,
736 const struct intel_crtc_state *crtc_state)
737 {
738 u8 val = DP_PANEL_REPLAY_ENABLE |
739 DP_PANEL_REPLAY_VSC_SDP_CRC_EN |
740 DP_PANEL_REPLAY_UNRECOVERABLE_ERROR_EN |
741 DP_PANEL_REPLAY_RFB_STORAGE_ERROR_EN |
742 DP_PANEL_REPLAY_ACTIVE_FRAME_CRC_ERROR_EN;
743 u8 panel_replay_config2 = DP_PANEL_REPLAY_CRC_VERIFICATION;
744
745 if (crtc_state->has_sel_update)
746 val |= DP_PANEL_REPLAY_SU_ENABLE;
747
748 if (crtc_state->enable_psr2_su_region_et)
749 val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
750
751 if (crtc_state->req_psr2_sdp_prior_scanline)
752 panel_replay_config2 |=
753 DP_PANEL_REPLAY_SU_REGION_SCANLINE_CAPTURE;
754
755 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG, val);
756
757 drm_dp_dpcd_writeb(&intel_dp->aux, PANEL_REPLAY_CONFIG2,
758 panel_replay_config2);
759 }
760
761 static void _psr_enable_sink(struct intel_dp *intel_dp,
762 const struct intel_crtc_state *crtc_state)
763 {
764 struct intel_display *display = to_intel_display(intel_dp);
765 u8 val = DP_PSR_ENABLE;
766
767 if (crtc_state->has_sel_update) {
768 val |= DP_PSR_ENABLE_PSR2 | DP_PSR_IRQ_HPD_WITH_CRC_ERRORS;
769 } else {
770 if (intel_dp->psr.link_standby)
771 val |= DP_PSR_MAIN_LINK_ACTIVE;
772
773 if (DISPLAY_VER(display) >= 8)
774 val |= DP_PSR_CRC_VERIFICATION;
775 }
776
777 if (crtc_state->req_psr2_sdp_prior_scanline)
778 val |= DP_PSR_SU_REGION_SCANLINE_CAPTURE;
779
780 if (crtc_state->enable_psr2_su_region_et)
781 val |= DP_PANEL_REPLAY_ENABLE_SU_REGION_ET;
782
783 if (intel_dp->psr.entry_setup_frames > 0)
784 val |= DP_PSR_FRAME_CAPTURE;
785
786 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, val);
787 }
788
789 static void intel_psr_enable_sink_alpm(struct intel_dp *intel_dp,
790 const struct intel_crtc_state *crtc_state)
791 {
792 u8 val;
793
794 /*
795 * eDP Panel Replay always uses ALPM.
796 * PSR2 uses ALPM but PSR1 doesn't.
797 */
798 if (!intel_dp_is_edp(intel_dp) || (!crtc_state->has_panel_replay &&
799 !crtc_state->has_sel_update))
800 return;
801
802 val = DP_ALPM_ENABLE | DP_ALPM_LOCK_ERROR_IRQ_HPD_ENABLE;
803
804 if (crtc_state->has_panel_replay)
805 val |= DP_ALPM_MODE_AUX_LESS;
806
807 drm_dp_dpcd_writeb(&intel_dp->aux, DP_RECEIVER_ALPM_CONFIG, val);
808 }
809
810 void intel_psr_enable_sink(struct intel_dp *intel_dp,
811 const struct intel_crtc_state *crtc_state)
812 {
813 intel_psr_enable_sink_alpm(intel_dp, crtc_state);
814
815 crtc_state->has_panel_replay ?
816 _panel_replay_enable_sink(intel_dp, crtc_state) :
817 _psr_enable_sink(intel_dp, crtc_state);
818
819 if (intel_dp_is_edp(intel_dp))
820 drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
821 }
822
823 static u32 intel_psr1_get_tp_time(struct intel_dp *intel_dp)
824 {
825 struct intel_display *display = to_intel_display(intel_dp);
826 struct intel_connector *connector = intel_dp->attached_connector;
827 struct drm_i915_private *dev_priv = to_i915(display->drm);
828 u32 val = 0;
829
830 if (DISPLAY_VER(display) >= 11)
831 val |= EDP_PSR_TP4_TIME_0us;
832
833 if (display->params.psr_safest_params) {
834 val |= EDP_PSR_TP1_TIME_2500us;
835 val |= EDP_PSR_TP2_TP3_TIME_2500us;
836 goto check_tp3_sel;
837 }
838
839 if (connector->panel.vbt.psr.tp1_wakeup_time_us == 0)
840 val |= EDP_PSR_TP1_TIME_0us;
841 else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 100)
842 val |= EDP_PSR_TP1_TIME_100us;
843 else if (connector->panel.vbt.psr.tp1_wakeup_time_us <= 500)
844 val |= EDP_PSR_TP1_TIME_500us;
845 else
846 val |= EDP_PSR_TP1_TIME_2500us;
847
848 if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
849 val |= EDP_PSR_TP2_TP3_TIME_0us;
850 else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 100)
851 val |= EDP_PSR_TP2_TP3_TIME_100us;
852 else if (connector->panel.vbt.psr.tp2_tp3_wakeup_time_us <= 500)
853 val |= EDP_PSR_TP2_TP3_TIME_500us;
854 else
855 val |= EDP_PSR_TP2_TP3_TIME_2500us;
856
857 /*
858 * WA 0479: hsw,bdw
859 * "Do not skip both TP1 and TP2/TP3"
860 */
861 if (DISPLAY_VER(dev_priv) < 9 &&
862 connector->panel.vbt.psr.tp1_wakeup_time_us == 0 &&
863 connector->panel.vbt.psr.tp2_tp3_wakeup_time_us == 0)
864 val |= EDP_PSR_TP2_TP3_TIME_100us;
865
866 check_tp3_sel:
867 if (intel_dp_source_supports_tps3(dev_priv) &&
868 drm_dp_tps3_supported(intel_dp->dpcd))
869 val |= EDP_PSR_TP_TP1_TP3;
870 else
871 val |= EDP_PSR_TP_TP1_TP2;
872
873 return val;
874 }
875
876 static u8 psr_compute_idle_frames(struct intel_dp *intel_dp)
877 {
878 struct intel_display *display = to_intel_display(intel_dp);
879 struct intel_connector *connector = intel_dp->attached_connector;
880 int idle_frames;
881
882 /* Let's use 6 as the minimum to cover all known cases including the
883 * off-by-one issue that HW has in some cases.
884 */
885 idle_frames = max(6, connector->panel.vbt.psr.idle_frames);
886 idle_frames = max(idle_frames, intel_dp->psr.sink_sync_latency + 1);
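	/* e.g. VBT idle_frames == 2 and sink_sync_latency == 3 gives max(6, 2) = 6, then max(6, 3 + 1) = 6 */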
887
888 if (drm_WARN_ON(display->drm, idle_frames > 0xf))
889 idle_frames = 0xf;
890
891 return idle_frames;
892 }
893
894 static void hsw_activate_psr1(struct intel_dp *intel_dp)
895 {
896 struct intel_display *display = to_intel_display(intel_dp);
897 struct drm_i915_private *dev_priv = to_i915(display->drm);
898 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
899 u32 max_sleep_time = 0x1f;
900 u32 val = EDP_PSR_ENABLE;
901
902 val |= EDP_PSR_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
903
904 if (DISPLAY_VER(display) < 20)
905 val |= EDP_PSR_MAX_SLEEP_TIME(max_sleep_time);
906
907 if (IS_HASWELL(dev_priv))
908 val |= EDP_PSR_MIN_LINK_ENTRY_TIME_8_LINES;
909
910 if (intel_dp->psr.link_standby)
911 val |= EDP_PSR_LINK_STANDBY;
912
913 val |= intel_psr1_get_tp_time(intel_dp);
914
915 if (DISPLAY_VER(display) >= 8)
916 val |= EDP_PSR_CRC_ENABLE;
917
918 if (DISPLAY_VER(display) >= 20)
919 val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
920
921 intel_de_rmw(display, psr_ctl_reg(display, cpu_transcoder),
922 ~EDP_PSR_RESTORE_PSR_ACTIVE_CTX_MASK, val);
923 }
924
925 static u32 intel_psr2_get_tp_time(struct intel_dp *intel_dp)
926 {
927 struct intel_display *display = to_intel_display(intel_dp);
928 struct intel_connector *connector = intel_dp->attached_connector;
929 u32 val = 0;
930
931 if (display->params.psr_safest_params)
932 return EDP_PSR2_TP2_TIME_2500us;
933
934 if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us >= 0 &&
935 connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 50)
936 val |= EDP_PSR2_TP2_TIME_50us;
937 else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 100)
938 val |= EDP_PSR2_TP2_TIME_100us;
939 else if (connector->panel.vbt.psr.psr2_tp2_tp3_wakeup_time_us <= 500)
940 val |= EDP_PSR2_TP2_TIME_500us;
941 else
942 val |= EDP_PSR2_TP2_TIME_2500us;
943
944 return val;
945 }
946
947 static int psr2_block_count_lines(struct intel_dp *intel_dp)
948 {
949 return intel_dp->alpm_parameters.io_wake_lines < 9 &&
950 intel_dp->alpm_parameters.fast_wake_lines < 9 ? 8 : 12;
951 }
952
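/*
 * psr2_block_count() below converts this into the PSR2 block count used by
 * hsw_activate_psr2(): 8 lines (2 blocks of 4) selects
 * TGL_EDP_PSR2_BLOCK_COUNT_NUM_2, 12 lines (3 blocks) selects
 * TGL_EDP_PSR2_BLOCK_COUNT_NUM_3.
 */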
953 static int psr2_block_count(struct intel_dp *intel_dp)
954 {
955 return psr2_block_count_lines(intel_dp) / 4;
956 }
957
958 static u8 frames_before_su_entry(struct intel_dp *intel_dp)
959 {
960 u8 frames_before_su_entry;
961
962 frames_before_su_entry = max_t(u8,
963 intel_dp->psr.sink_sync_latency + 1,
964 2);
965
966 /* Entry setup frames must be at least 1 less than frames before SU entry */
967 if (intel_dp->psr.entry_setup_frames >= frames_before_su_entry)
968 frames_before_su_entry = intel_dp->psr.entry_setup_frames + 1;
969
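	/* e.g. sink_sync_latency == 3 gives 4; entry_setup_frames == 4 then bumps this to 5 */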
970 return frames_before_su_entry;
971 }
972
973 static void dg2_activate_panel_replay(struct intel_dp *intel_dp)
974 {
975 struct intel_display *display = to_intel_display(intel_dp);
976 struct intel_psr *psr = &intel_dp->psr;
977 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
978
979 if (intel_dp_is_edp(intel_dp) && psr->sel_update_enabled) {
980 u32 val = psr->su_region_et_enabled ?
981 LNL_EDP_PSR2_SU_REGION_ET_ENABLE : 0;
982
983 if (intel_dp->psr.req_psr2_sdp_prior_scanline)
984 val |= EDP_PSR2_SU_SDP_SCANLINE;
985
986 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder),
987 val);
988 }
989
990 intel_de_rmw(display,
991 PSR2_MAN_TRK_CTL(display, intel_dp->psr.transcoder),
992 0, ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME);
993
994 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder), 0,
995 TRANS_DP2_PANEL_REPLAY_ENABLE);
996 }
997
998 static void hsw_activate_psr2(struct intel_dp *intel_dp)
999 {
1000 struct intel_display *display = to_intel_display(intel_dp);
1001 struct drm_i915_private *dev_priv = to_i915(display->drm);
1002 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1003 u32 val = EDP_PSR2_ENABLE;
1004 u32 psr_val = 0;
1005
1006 val |= EDP_PSR2_IDLE_FRAMES(psr_compute_idle_frames(intel_dp));
1007
1008 if (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))
1009 val |= EDP_SU_TRACK_ENABLE;
1010
1011 if (DISPLAY_VER(display) >= 10 && DISPLAY_VER(display) < 13)
1012 val |= EDP_Y_COORDINATE_ENABLE;
1013
1014 val |= EDP_PSR2_FRAME_BEFORE_SU(frames_before_su_entry(intel_dp));
1015
1016 val |= intel_psr2_get_tp_time(intel_dp);
1017
1018 if (DISPLAY_VER(display) >= 12 && DISPLAY_VER(display) < 20) {
1019 if (psr2_block_count(intel_dp) > 2)
1020 val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_3;
1021 else
1022 val |= TGL_EDP_PSR2_BLOCK_COUNT_NUM_2;
1023 }
1024
1025 /* Wa_22012278275:adl-p */
1026 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_E0)) {
1027 static const u8 map[] = {
1028 2, /* 5 lines */
1029 1, /* 6 lines */
1030 0, /* 7 lines */
1031 3, /* 8 lines */
1032 6, /* 9 lines */
1033 5, /* 10 lines */
1034 4, /* 11 lines */
1035 7, /* 12 lines */
1036 };
1037 /*
1038 * Still using the default IO_BUFFER_WAKE and FAST_WAKE, see
1039 * comments below for more information
1040 */
1041 int tmp;
1042
1043 tmp = map[intel_dp->alpm_parameters.io_wake_lines -
1044 TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES];
1045 val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(tmp + TGL_EDP_PSR2_IO_BUFFER_WAKE_MIN_LINES);
1046
1047 tmp = map[intel_dp->alpm_parameters.fast_wake_lines - TGL_EDP_PSR2_FAST_WAKE_MIN_LINES];
1048 val |= TGL_EDP_PSR2_FAST_WAKE(tmp + TGL_EDP_PSR2_FAST_WAKE_MIN_LINES);
1049 } else if (DISPLAY_VER(display) >= 20) {
1050 val |= LNL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1051 } else if (DISPLAY_VER(display) >= 12) {
1052 val |= TGL_EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1053 val |= TGL_EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1054 } else if (DISPLAY_VER(display) >= 9) {
1055 val |= EDP_PSR2_IO_BUFFER_WAKE(intel_dp->alpm_parameters.io_wake_lines);
1056 val |= EDP_PSR2_FAST_WAKE(intel_dp->alpm_parameters.fast_wake_lines);
1057 }
1058
1059 if (intel_dp->psr.req_psr2_sdp_prior_scanline)
1060 val |= EDP_PSR2_SU_SDP_SCANLINE;
1061
1062 if (DISPLAY_VER(display) >= 20)
1063 psr_val |= LNL_EDP_PSR_ENTRY_SETUP_FRAMES(intel_dp->psr.entry_setup_frames);
1064
1065 if (intel_dp->psr.psr2_sel_fetch_enabled) {
1066 u32 tmp;
1067
1068 tmp = intel_de_read(display,
1069 PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1070 drm_WARN_ON(display->drm, !(tmp & PSR2_MAN_TRK_CTL_ENABLE));
1071 } else if (HAS_PSR2_SEL_FETCH(display)) {
1072 intel_de_write(display,
1073 PSR2_MAN_TRK_CTL(display, cpu_transcoder), 0);
1074 }
1075
1076 if (intel_dp->psr.su_region_et_enabled)
1077 val |= LNL_EDP_PSR2_SU_REGION_ET_ENABLE;
1078
1079 /*
1080 * PSR2 HW is incorrectly using EDP_PSR_TP1_TP3_SEL and BSpec
1081 * recommends keeping this bit unset while PSR2 is enabled.
1082 */
1083 intel_de_write(display, psr_ctl_reg(display, cpu_transcoder), psr_val);
1084
1085 intel_de_write(display, EDP_PSR2_CTL(display, cpu_transcoder), val);
1086 }
1087
1088 static bool
1089 transcoder_has_psr2(struct intel_display *display, enum transcoder cpu_transcoder)
1090 {
1091 struct drm_i915_private *dev_priv = to_i915(display->drm);
1092
1093 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1094 return cpu_transcoder == TRANSCODER_A || cpu_transcoder == TRANSCODER_B;
1095 else if (DISPLAY_VER(display) >= 12)
1096 return cpu_transcoder == TRANSCODER_A;
1097 else if (DISPLAY_VER(display) >= 9)
1098 return cpu_transcoder == TRANSCODER_EDP;
1099 else
1100 return false;
1101 }
1102
1103 static u32 intel_get_frame_time_us(const struct intel_crtc_state *crtc_state)
1104 {
1105 if (!crtc_state->hw.active)
1106 return 0;
1107
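	/* Frame time in microseconds, e.g. DIV_ROUND_UP(1000000, 60) = 16667 us for a 60 Hz mode */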
1108 return DIV_ROUND_UP(1000 * 1000,
1109 drm_mode_vrefresh(&crtc_state->hw.adjusted_mode));
1110 }
1111
1112 static void psr2_program_idle_frames(struct intel_dp *intel_dp,
1113 u32 idle_frames)
1114 {
1115 struct intel_display *display = to_intel_display(intel_dp);
1116 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1117
1118 intel_de_rmw(display, EDP_PSR2_CTL(display, cpu_transcoder),
1119 EDP_PSR2_IDLE_FRAMES_MASK,
1120 EDP_PSR2_IDLE_FRAMES(idle_frames));
1121 }
1122
1123 static void tgl_psr2_enable_dc3co(struct intel_dp *intel_dp)
1124 {
1125 struct intel_display *display = to_intel_display(intel_dp);
1126 struct drm_i915_private *dev_priv = to_i915(display->drm);
1127
1128 psr2_program_idle_frames(intel_dp, 0);
1129 intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_DC3CO);
1130 }
1131
1132 static void tgl_psr2_disable_dc3co(struct intel_dp *intel_dp)
1133 {
1134 struct intel_display *display = to_intel_display(intel_dp);
1135 struct drm_i915_private *dev_priv = to_i915(display->drm);
1136
1137 intel_display_power_set_target_dc_state(dev_priv, DC_STATE_EN_UPTO_DC6);
1138 psr2_program_idle_frames(intel_dp, psr_compute_idle_frames(intel_dp));
1139 }
1140
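/*
 * Delayed work armed roughly 6 frames after the last flip (see the DC3CO
 * description at the top of this file). If no new flip has re-armed it in
 * the meantime, drop back from DC3CO to the regular PSR2 deep sleep config.
 */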
1141 static void tgl_dc3co_disable_work(struct work_struct *work)
1142 {
1143 struct intel_dp *intel_dp =
1144 container_of(work, typeof(*intel_dp), psr.dc3co_work.work);
1145
1146 mutex_lock(&intel_dp->psr.lock);
1147 /* If delayed work is pending, it is not idle */
1148 if (delayed_work_pending(&intel_dp->psr.dc3co_work))
1149 goto unlock;
1150
1151 tgl_psr2_disable_dc3co(intel_dp);
1152 unlock:
1153 mutex_unlock(&intel_dp->psr.lock);
1154 }
1155
1156 static void tgl_disallow_dc3co_on_psr2_exit(struct intel_dp *intel_dp)
1157 {
1158 if (!intel_dp->psr.dc3co_exitline)
1159 return;
1160
1161 cancel_delayed_work(&intel_dp->psr.dc3co_work);
1162 /* Before PSR2 exit disallow dc3co */
1163 tgl_psr2_disable_dc3co(intel_dp);
1164 }
1165
1166 static bool
1167 dc3co_is_pipe_port_compatible(struct intel_dp *intel_dp,
1168 struct intel_crtc_state *crtc_state)
1169 {
1170 struct intel_display *display = to_intel_display(intel_dp);
1171 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1172 enum pipe pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1173 struct drm_i915_private *dev_priv = to_i915(display->drm);
1174 enum port port = dig_port->base.port;
1175
1176 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1177 return pipe <= PIPE_B && port <= PORT_B;
1178 else
1179 return pipe == PIPE_A && port == PORT_A;
1180 }
1181
1182 static void
1183 tgl_dc3co_exitline_compute_config(struct intel_dp *intel_dp,
1184 struct intel_crtc_state *crtc_state)
1185 {
1186 struct intel_display *display = to_intel_display(intel_dp);
1187 struct drm_i915_private *dev_priv = to_i915(display->drm);
1188 const u32 crtc_vdisplay = crtc_state->uapi.adjusted_mode.crtc_vdisplay;
1189 struct i915_power_domains *power_domains = &display->power.domains;
1190 u32 exit_scanlines;
1191
1192 /*
1193 * FIXME: Due to the changed sequence of activating/deactivating DC3CO,
1194 * disable DC3CO until the changed dc3co activating/deactivating sequence
1195 * is applied. B.Specs:49196
1196 */
1197 return;
1198
1199 /*
1200 * DMC's DC3CO exit mechanism has an issue with Selective Fetch
1201 * TODO: when the issue is addressed, this restriction should be removed.
1202 */
1203 if (crtc_state->enable_psr2_sel_fetch)
1204 return;
1205
1206 if (!(power_domains->allowed_dc_mask & DC_STATE_EN_DC3CO))
1207 return;
1208
1209 if (!dc3co_is_pipe_port_compatible(intel_dp, crtc_state))
1210 return;
1211
1212 /* Wa_16011303918:adl-p */
1213 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0))
1214 return;
1215
1216 /*
1217 * DC3CO Exit time 200us B.Spec 49196
1218 * PSR2 transcoder Early Exit scanlines = ROUNDUP(200 / line time) + 1
1219 */
1220 exit_scanlines =
1221 intel_usecs_to_scanlines(&crtc_state->uapi.adjusted_mode, 200) + 1;
1222
1223 if (drm_WARN_ON(display->drm, exit_scanlines > crtc_vdisplay))
1224 return;
1225
1226 crtc_state->dc3co_exitline = crtc_vdisplay - exit_scanlines;
1227 }
1228
1229 static bool intel_psr2_sel_fetch_config_valid(struct intel_dp *intel_dp,
1230 struct intel_crtc_state *crtc_state)
1231 {
1232 struct intel_display *display = to_intel_display(intel_dp);
1233
1234 if (!display->params.enable_psr2_sel_fetch &&
1235 intel_dp->psr.debug != I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
1236 drm_dbg_kms(display->drm,
1237 "PSR2 sel fetch not enabled, disabled by parameter\n");
1238 return false;
1239 }
1240
1241 if (crtc_state->uapi.async_flip) {
1242 drm_dbg_kms(display->drm,
1243 "PSR2 sel fetch not enabled, async flip enabled\n");
1244 return false;
1245 }
1246
1247 return crtc_state->enable_psr2_sel_fetch = true;
1248 }
1249
1250 static bool psr2_granularity_check(struct intel_dp *intel_dp,
1251 struct intel_crtc_state *crtc_state)
1252 {
1253 struct intel_display *display = to_intel_display(intel_dp);
1254 struct drm_i915_private *dev_priv = to_i915(display->drm);
1255 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1256 const int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1257 const int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1258 u16 y_granularity = 0;
1259
1260 /* PSR2 HW only sends full lines so we only need to validate the width */
1261 if (crtc_hdisplay % intel_dp->psr.su_w_granularity)
1262 return false;
1263
1264 if (crtc_vdisplay % intel_dp->psr.su_y_granularity)
1265 return false;
1266
1267 /* HW tracking is only aligned to 4 lines */
1268 if (!crtc_state->enable_psr2_sel_fetch)
1269 return intel_dp->psr.su_y_granularity == 4;
1270
1271 /*
1272 * adl_p and mtl platforms have 1 line granularity.
1273 * For other platforms with SW tracking we can adjust the y coordinates
1274 * to match the sink requirement if it is a multiple of 4.
1275 */
1276 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14)
1277 y_granularity = intel_dp->psr.su_y_granularity;
1278 else if (intel_dp->psr.su_y_granularity <= 2)
1279 y_granularity = 4;
1280 else if ((intel_dp->psr.su_y_granularity % 4) == 0)
1281 y_granularity = intel_dp->psr.su_y_granularity;
1282
1283 if (y_granularity == 0 || crtc_vdisplay % y_granularity)
1284 return false;
1285
1286 if (crtc_state->dsc.compression_enable &&
1287 vdsc_cfg->slice_height % y_granularity)
1288 return false;
1289
1290 crtc_state->su_y_granularity = y_granularity;
1291 return true;
1292 }
1293
1294 static bool _compute_psr2_sdp_prior_scanline_indication(struct intel_dp *intel_dp,
1295 struct intel_crtc_state *crtc_state)
1296 {
1297 struct intel_display *display = to_intel_display(intel_dp);
1298 const struct drm_display_mode *adjusted_mode = &crtc_state->uapi.adjusted_mode;
1299 u32 hblank_total, hblank_ns, req_ns;
1300
1301 hblank_total = adjusted_mode->crtc_hblank_end - adjusted_mode->crtc_hblank_start;
1302 hblank_ns = div_u64(1000000ULL * hblank_total, adjusted_mode->crtc_clock);
1303
1304 /* From spec: ((60 / number of lanes) + 11) * 1000 / symbol clock frequency MHz */
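	/* e.g. 4 lanes at a 270 MHz symbol clock: ((60 / 4) + 11) * 1000 / 270 = ~96 ns */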
1305 req_ns = ((60 / crtc_state->lane_count) + 11) * 1000 / (crtc_state->port_clock / 1000);
1306
1307 if ((hblank_ns - req_ns) > 100)
1308 return true;
1309
1310 /* Not supported <13 / Wa_22012279113:adl-p */
1311 if (DISPLAY_VER(display) < 14 || intel_dp->edp_dpcd[0] < DP_EDP_14b)
1312 return false;
1313
1314 crtc_state->req_psr2_sdp_prior_scanline = true;
1315 return true;
1316 }
1317
1318 static int intel_psr_entry_setup_frames(struct intel_dp *intel_dp,
1319 const struct drm_display_mode *adjusted_mode)
1320 {
1321 struct intel_display *display = to_intel_display(intel_dp);
1322 int psr_setup_time = drm_dp_psr_setup_time(intel_dp->psr_dpcd);
1323 int entry_setup_frames = 0;
1324
1325 if (psr_setup_time < 0) {
1326 drm_dbg_kms(display->drm,
1327 "PSR condition failed: Invalid PSR setup time (0x%02x)\n",
1328 intel_dp->psr_dpcd[1]);
1329 return -ETIME;
1330 }
1331
1332 if (intel_usecs_to_scanlines(adjusted_mode, psr_setup_time) >
1333 adjusted_mode->crtc_vtotal - adjusted_mode->crtc_vdisplay - 1) {
1334 if (DISPLAY_VER(display) >= 20) {
1335 /* setup entry frames can be up to 3 frames */
1336 entry_setup_frames = 1;
1337 drm_dbg_kms(display->drm,
1338 "PSR setup entry frames %d\n",
1339 entry_setup_frames);
1340 } else {
1341 drm_dbg_kms(display->drm,
1342 "PSR condition failed: PSR setup time (%d us) too long\n",
1343 psr_setup_time);
1344 return -ETIME;
1345 }
1346 }
1347
1348 return entry_setup_frames;
1349 }
1350
1351 static bool wake_lines_fit_into_vblank(struct intel_dp *intel_dp,
1352 const struct intel_crtc_state *crtc_state,
1353 bool aux_less)
1354 {
1355 struct intel_display *display = to_intel_display(intel_dp);
1356 int vblank = crtc_state->hw.adjusted_mode.crtc_vblank_end -
1357 crtc_state->hw.adjusted_mode.crtc_vblank_start;
1358 int wake_lines;
1359
1360 if (aux_less)
1361 wake_lines = intel_dp->alpm_parameters.aux_less_wake_lines;
1362 else
1363 wake_lines = DISPLAY_VER(display) < 20 ?
1364 psr2_block_count_lines(intel_dp) :
1365 intel_dp->alpm_parameters.io_wake_lines;
1366
1367 if (crtc_state->req_psr2_sdp_prior_scanline)
1368 vblank -= 1;
1369
1370 /* Vblank >= PSR2_CTL Block Count Number maximum line count */
1371 if (vblank < wake_lines)
1372 return false;
1373
1374 return true;
1375 }
1376
1377 static bool alpm_config_valid(struct intel_dp *intel_dp,
1378 const struct intel_crtc_state *crtc_state,
1379 bool aux_less)
1380 {
1381 struct intel_display *display = to_intel_display(intel_dp);
1382
1383 if (!intel_alpm_compute_params(intel_dp, crtc_state)) {
1384 drm_dbg_kms(display->drm,
1385 "PSR2/Panel Replay not enabled, Unable to use long enough wake times\n");
1386 return false;
1387 }
1388
1389 if (!wake_lines_fit_into_vblank(intel_dp, crtc_state, aux_less)) {
1390 drm_dbg_kms(display->drm,
1391 "PSR2/Panel Replay not enabled, too short vblank time\n");
1392 return false;
1393 }
1394
1395 return true;
1396 }
1397
1398 static bool intel_psr2_config_valid(struct intel_dp *intel_dp,
1399 struct intel_crtc_state *crtc_state)
1400 {
1401 struct intel_display *display = to_intel_display(intel_dp);
1402 struct drm_i915_private *dev_priv = to_i915(display->drm);
1403 int crtc_hdisplay = crtc_state->hw.adjusted_mode.crtc_hdisplay;
1404 int crtc_vdisplay = crtc_state->hw.adjusted_mode.crtc_vdisplay;
1405 int psr_max_h = 0, psr_max_v = 0, max_bpp = 0;
1406
1407 if (!intel_dp->psr.sink_psr2_support)
1408 return false;
1409
1410 /* JSL and EHL only support eDP 1.3 */
1411 if (IS_JASPERLAKE(dev_priv) || IS_ELKHARTLAKE(dev_priv)) {
1412 drm_dbg_kms(display->drm, "PSR2 not supported by phy\n");
1413 return false;
1414 }
1415
1416 /* Wa_16011181250 */
1417 if (IS_ROCKETLAKE(dev_priv) || IS_ALDERLAKE_S(dev_priv) ||
1418 IS_DG2(dev_priv)) {
1419 drm_dbg_kms(display->drm,
1420 "PSR2 is defeatured for this platform\n");
1421 return false;
1422 }
1423
1424 if (IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1425 drm_dbg_kms(display->drm,
1426 "PSR2 not completely functional in this stepping\n");
1427 return false;
1428 }
1429
1430 if (!transcoder_has_psr2(display, crtc_state->cpu_transcoder)) {
1431 drm_dbg_kms(display->drm,
1432 "PSR2 not supported in transcoder %s\n",
1433 transcoder_name(crtc_state->cpu_transcoder));
1434 return false;
1435 }
1436
1437 /*
1438 * DSC and PSR2 cannot be enabled simultaneously. If a requested
1439 * resolution requires DSC to be enabled, priority is given to DSC
1440 * over PSR2.
1441 */
1442 if (crtc_state->dsc.compression_enable &&
1443 (DISPLAY_VER(display) < 14 && !IS_ALDERLAKE_P(dev_priv))) {
1444 drm_dbg_kms(display->drm,
1445 "PSR2 cannot be enabled since DSC is enabled\n");
1446 return false;
1447 }
1448
1449 if (DISPLAY_VER(display) >= 12) {
1450 psr_max_h = 5120;
1451 psr_max_v = 3200;
1452 max_bpp = 30;
1453 } else if (DISPLAY_VER(display) >= 10) {
1454 psr_max_h = 4096;
1455 psr_max_v = 2304;
1456 max_bpp = 24;
1457 } else if (DISPLAY_VER(display) == 9) {
1458 psr_max_h = 3640;
1459 psr_max_v = 2304;
1460 max_bpp = 24;
1461 }
1462
1463 if (crtc_state->pipe_bpp > max_bpp) {
1464 drm_dbg_kms(display->drm,
1465 "PSR2 not enabled, pipe bpp %d > max supported %d\n",
1466 crtc_state->pipe_bpp, max_bpp);
1467 return false;
1468 }
1469
1470 /* Wa_16011303918:adl-p */
1471 if (crtc_state->vrr.enable &&
1472 IS_ALDERLAKE_P(dev_priv) && IS_DISPLAY_STEP(display, STEP_A0, STEP_B0)) {
1473 drm_dbg_kms(display->drm,
1474 "PSR2 not enabled, not compatible with HW stepping + VRR\n");
1475 return false;
1476 }
1477
1478 if (!alpm_config_valid(intel_dp, crtc_state, false))
1479 return false;
1480
1481 if (!crtc_state->enable_psr2_sel_fetch &&
1482 (crtc_hdisplay > psr_max_h || crtc_vdisplay > psr_max_v)) {
1483 drm_dbg_kms(display->drm,
1484 "PSR2 not enabled, resolution %dx%d > max supported %dx%d\n",
1485 crtc_hdisplay, crtc_vdisplay,
1486 psr_max_h, psr_max_v);
1487 return false;
1488 }
1489
1490 tgl_dc3co_exitline_compute_config(intel_dp, crtc_state);
1491
1492 return true;
1493 }
1494
1495 static bool intel_sel_update_config_valid(struct intel_dp *intel_dp,
1496 struct intel_crtc_state *crtc_state)
1497 {
1498 struct intel_display *display = to_intel_display(intel_dp);
1499
1500 if (HAS_PSR2_SEL_FETCH(display) &&
1501 !intel_psr2_sel_fetch_config_valid(intel_dp, crtc_state) &&
1502 !HAS_PSR_HW_TRACKING(display)) {
1503 drm_dbg_kms(display->drm,
1504 "Selective update not enabled, selective fetch not valid and no HW tracking available\n");
1505 goto unsupported;
1506 }
1507
1508 if (!psr2_global_enabled(intel_dp)) {
1509 drm_dbg_kms(display->drm,
1510 "Selective update disabled by flag\n");
1511 goto unsupported;
1512 }
1513
1514 if (!crtc_state->has_panel_replay && !intel_psr2_config_valid(intel_dp, crtc_state))
1515 goto unsupported;
1516
1517 if (!_compute_psr2_sdp_prior_scanline_indication(intel_dp, crtc_state)) {
1518 drm_dbg_kms(display->drm,
1519 "Selective update not enabled, SDP indication do not fit in hblank\n");
1520 goto unsupported;
1521 }
1522
1523 if (crtc_state->has_panel_replay && (DISPLAY_VER(display) < 14 ||
1524 !intel_dp->psr.sink_panel_replay_su_support))
1525 goto unsupported;
1526
1527 if (crtc_state->crc_enabled) {
1528 drm_dbg_kms(display->drm,
1529 "Selective update not enabled because it would inhibit pipe CRC calculation\n");
1530 goto unsupported;
1531 }
1532
1533 if (!psr2_granularity_check(intel_dp, crtc_state)) {
1534 drm_dbg_kms(display->drm,
1535 "Selective update not enabled, SU granularity not compatible\n");
1536 goto unsupported;
1537 }
1538
1539 crtc_state->enable_psr2_su_region_et =
1540 psr2_su_region_et_valid(intel_dp, crtc_state->has_panel_replay);
1541
1542 return true;
1543
1544 unsupported:
1545 crtc_state->enable_psr2_sel_fetch = false;
1546 return false;
1547 }
1548
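/*
 * Check the PSR1-level constraints (VRR, sink support, entry setup frames)
 * and record the computed entry setup frames on success.
 */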
1549 static bool _psr_compute_config(struct intel_dp *intel_dp,
1550 struct intel_crtc_state *crtc_state)
1551 {
1552 struct intel_display *display = to_intel_display(intel_dp);
1553 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1554 int entry_setup_frames;
1555
1556 /*
1557 * Current PSR panels don't work reliably with VRR enabled,
1558 * so if VRR is enabled, do not enable PSR.
1559 */
1560 if (crtc_state->vrr.enable)
1561 return false;
1562
1563 if (!CAN_PSR(intel_dp))
1564 return false;
1565
1566 entry_setup_frames = intel_psr_entry_setup_frames(intel_dp, adjusted_mode);
1567
1568 if (entry_setup_frames >= 0) {
1569 intel_dp->psr.entry_setup_frames = entry_setup_frames;
1570 } else {
1571 drm_dbg_kms(display->drm,
1572 "PSR condition failed: PSR setup timing not met\n");
1573 return false;
1574 }
1575
1576 return true;
1577 }
1578
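/*
 * Decide whether Panel Replay can be used for this connector. DP connectors
 * only need the global knob; eDP additionally requires a non-128b/132b link,
 * no HDCP, a valid ALPM configuration and no pipe CRC.
 */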
1579 static bool
1580 _panel_replay_compute_config(struct intel_dp *intel_dp,
1581 const struct intel_crtc_state *crtc_state,
1582 const struct drm_connector_state *conn_state)
1583 {
1584 struct intel_display *display = to_intel_display(intel_dp);
1585 struct intel_connector *connector =
1586 to_intel_connector(conn_state->connector);
1587 struct intel_hdcp *hdcp = &connector->hdcp;
1588
1589 if (!CAN_PANEL_REPLAY(intel_dp))
1590 return false;
1591
1592 if (!panel_replay_global_enabled(intel_dp)) {
1593 drm_dbg_kms(display->drm, "Panel Replay disabled by flag\n");
1594 return false;
1595 }
1596
1597 if (!intel_dp_is_edp(intel_dp))
1598 return true;
1599
1600 /* Remaining checks are for eDP only */
1601
1602 /* 128b/132b Panel Replay is not supported on eDP */
1603 if (intel_dp_is_uhbr(crtc_state)) {
1604 drm_dbg_kms(display->drm,
1605 "Panel Replay is not supported with 128b/132b\n");
1606 return false;
1607 }
1608
1609 /* HW will not allow Panel Replay on eDP when HDCP enabled */
1610 if (conn_state->content_protection ==
1611 DRM_MODE_CONTENT_PROTECTION_DESIRED ||
1612 (conn_state->content_protection ==
1613 DRM_MODE_CONTENT_PROTECTION_ENABLED && hdcp->value ==
1614 DRM_MODE_CONTENT_PROTECTION_UNDESIRED)) {
1615 drm_dbg_kms(display->drm,
1616 "Panel Replay is not supported with HDCP\n");
1617 return false;
1618 }
1619
1620 if (!alpm_config_valid(intel_dp, crtc_state, true))
1621 return false;
1622
1623 if (crtc_state->crc_enabled) {
1624 drm_dbg_kms(display->drm,
1625 "Panel Replay not enabled because it would inhibit pipe CRC calculation\n");
1626 return false;
1627 }
1628
1629 return true;
1630 }
1631
1632 void intel_psr_compute_config(struct intel_dp *intel_dp,
1633 struct intel_crtc_state *crtc_state,
1634 struct drm_connector_state *conn_state)
1635 {
1636 struct intel_display *display = to_intel_display(intel_dp);
1637 const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
1638
1639 if (!psr_global_enabled(intel_dp)) {
1640 drm_dbg_kms(display->drm, "PSR disabled by flag\n");
1641 return;
1642 }
1643
1644 if (intel_dp->psr.sink_not_reliable) {
1645 drm_dbg_kms(display->drm,
1646 "PSR sink implementation is not reliable\n");
1647 return;
1648 }
1649
1650 if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE) {
1651 drm_dbg_kms(display->drm,
1652 "PSR condition failed: Interlaced mode enabled\n");
1653 return;
1654 }
1655
1656 /*
1657 * FIXME figure out what is wrong with PSR+joiner and
1658 * fix it. Presumably something related to the fact that
1659 * PSR is a transcoder level feature.
1660 */
1661 if (crtc_state->joiner_pipes) {
1662 drm_dbg_kms(display->drm,
1663 "PSR disabled due to joiner\n");
1664 return;
1665 }
1666
1667 crtc_state->has_panel_replay = _panel_replay_compute_config(intel_dp,
1668 crtc_state,
1669 conn_state);
1670
1671 crtc_state->has_psr = crtc_state->has_panel_replay ? true :
1672 _psr_compute_config(intel_dp, crtc_state);
1673
1674 if (!crtc_state->has_psr)
1675 return;
1676
1677 crtc_state->has_sel_update = intel_sel_update_config_valid(intel_dp, crtc_state);
1678 }
1679
1680 void intel_psr_get_config(struct intel_encoder *encoder,
1681 struct intel_crtc_state *pipe_config)
1682 {
1683 struct intel_display *display = to_intel_display(encoder);
1684 struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1685 enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
1686 struct intel_dp *intel_dp;
1687 u32 val;
1688
1689 if (!dig_port)
1690 return;
1691
1692 intel_dp = &dig_port->dp;
1693 if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp)))
1694 return;
1695
1696 mutex_lock(&intel_dp->psr.lock);
1697 if (!intel_dp->psr.enabled)
1698 goto unlock;
1699
1700 if (intel_dp->psr.panel_replay_enabled) {
1701 pipe_config->has_psr = pipe_config->has_panel_replay = true;
1702 } else {
1703 /*
1704 * Not possible to rely on the EDP_PSR/PSR2_CTL registers as PSR is
1705 * enabled/disabled on the fly because of frontbuffer tracking and others.
1706 */
1707 pipe_config->has_psr = true;
1708 }
1709
1710 pipe_config->has_sel_update = intel_dp->psr.sel_update_enabled;
1711 pipe_config->infoframes.enable |= intel_hdmi_infoframe_enable(DP_SDP_VSC);
1712
1713 if (!intel_dp->psr.sel_update_enabled)
1714 goto unlock;
1715
1716 if (HAS_PSR2_SEL_FETCH(display)) {
1717 val = intel_de_read(display,
1718 PSR2_MAN_TRK_CTL(display, cpu_transcoder));
1719 if (val & PSR2_MAN_TRK_CTL_ENABLE)
1720 pipe_config->enable_psr2_sel_fetch = true;
1721 }
1722
1723 pipe_config->enable_psr2_su_region_et = intel_dp->psr.su_region_et_enabled;
1724
1725 if (DISPLAY_VER(display) >= 12) {
1726 val = intel_de_read(display,
1727 TRANS_EXITLINE(display, cpu_transcoder));
1728 pipe_config->dc3co_exitline = REG_FIELD_GET(EXITLINE_MASK, val);
1729 }
1730 unlock:
1731 mutex_unlock(&intel_dp->psr.lock);
1732 }
1733
1734 static void intel_psr_activate(struct intel_dp *intel_dp)
1735 {
1736 struct intel_display *display = to_intel_display(intel_dp);
1737 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1738
1739 drm_WARN_ON(display->drm,
1740 transcoder_has_psr2(display, cpu_transcoder) &&
1741 intel_de_read(display, EDP_PSR2_CTL(display, cpu_transcoder)) & EDP_PSR2_ENABLE);
1742
1743 drm_WARN_ON(display->drm,
1744 intel_de_read(display, psr_ctl_reg(display, cpu_transcoder)) & EDP_PSR_ENABLE);
1745
1746 drm_WARN_ON(display->drm, intel_dp->psr.active);
1747
1748 lockdep_assert_held(&intel_dp->psr.lock);
1749
1750 /* psr1, psr2 and panel-replay are mutually exclusive. */
1751 if (intel_dp->psr.panel_replay_enabled)
1752 dg2_activate_panel_replay(intel_dp);
1753 else if (intel_dp->psr.sel_update_enabled)
1754 hsw_activate_psr2(intel_dp);
1755 else
1756 hsw_activate_psr1(intel_dp);
1757
1758 intel_dp->psr.active = true;
1759 }
1760
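/* Return the per-pipe LATENCY_REPORTING_REMOVED bit used by Wa_16013835468. */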
1761 static u32 wa_16013835468_bit_get(struct intel_dp *intel_dp)
1762 {
1763 switch (intel_dp->psr.pipe) {
1764 case PIPE_A:
1765 return LATENCY_REPORTING_REMOVED_PIPE_A;
1766 case PIPE_B:
1767 return LATENCY_REPORTING_REMOVED_PIPE_B;
1768 case PIPE_C:
1769 return LATENCY_REPORTING_REMOVED_PIPE_C;
1770 case PIPE_D:
1771 return LATENCY_REPORTING_REMOVED_PIPE_D;
1772 default:
1773 MISSING_CASE(intel_dp->psr.pipe);
1774 return 0;
1775 }
1776 }
1777
1778 /*
1779 * Wa_16013835468
1780 * Wa_14015648006
1781 */
1782 static void wm_optimization_wa(struct intel_dp *intel_dp,
1783 const struct intel_crtc_state *crtc_state)
1784 {
1785 struct intel_display *display = to_intel_display(intel_dp);
1786 bool set_wa_bit = false;
1787
1788 /* Wa_14015648006 */
1789 if (IS_DISPLAY_VER(display, 11, 14))
1790 set_wa_bit |= crtc_state->wm_level_disabled;
1791
1792 /* Wa_16013835468 */
1793 if (DISPLAY_VER(display) == 12)
1794 set_wa_bit |= crtc_state->hw.adjusted_mode.crtc_vblank_start !=
1795 crtc_state->hw.adjusted_mode.crtc_vdisplay;
1796
1797 if (set_wa_bit)
1798 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1799 0, wa_16013835468_bit_get(intel_dp));
1800 else
1801 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
1802 wa_16013835468_bit_get(intel_dp), 0);
1803 }
1804
1805 static void intel_psr_enable_source(struct intel_dp *intel_dp,
1806 const struct intel_crtc_state *crtc_state)
1807 {
1808 struct intel_display *display = to_intel_display(intel_dp);
1809 struct drm_i915_private *dev_priv = to_i915(display->drm);
1810 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1811 u32 mask = 0;
1812
1813 /*
1814 * Only HSW and BDW have PSR AUX registers that need to be setup.
1815 * SKL+ use hardcoded values for PSR AUX transactions.
1816 */
1817 if (DISPLAY_VER(display) < 9)
1818 hsw_psr_setup_aux(intel_dp);
1819
1820 /*
1821 * Per Spec: Avoid continuous PSR exit by masking MEMUP and HPD. Also
1822 * mask LPSP to avoid a dependency on other drivers that might block
1823 * runtime_pm, besides preventing other HW tracking issues, now that we
1824 * can rely on frontbuffer tracking.
1825 *
1826 * From bspec, prior to LunarLake:
1827 * Only PSR_MASK[Mask FBC modify] and PSR_MASK[Mask Hotplug] are used in
1828 * panel replay mode.
1829 *
1830 * From bspec, beyond LunarLake:
1831 * Panel Replay on DP: No bits are applicable
1832 * Panel Replay on eDP: All bits are applicable
1833 */
1834 if (DISPLAY_VER(display) < 20 || intel_dp_is_edp(intel_dp))
1835 mask = EDP_PSR_DEBUG_MASK_HPD;
1836
1837 if (intel_dp_is_edp(intel_dp)) {
1838 mask |= EDP_PSR_DEBUG_MASK_MEMUP;
1839
1840 /*
1841 * For some unknown reason on HSW non-ULT (or at least on
1842 * Dell Latitude E6540) external displays start to flicker
1843 * when PSR is enabled on the eDP. SR/PC6 residency is much
1844 * higher than should be possible with an external display.
1845 * As a workaround leave LPSP unmasked to prevent PSR entry
1846 * when external displays are active.
1847 */
1848 if (DISPLAY_VER(display) >= 8 || IS_HASWELL_ULT(dev_priv))
1849 mask |= EDP_PSR_DEBUG_MASK_LPSP;
1850
1851 if (DISPLAY_VER(display) < 20)
1852 mask |= EDP_PSR_DEBUG_MASK_MAX_SLEEP;
1853
1854 /*
1855 * No separate pipe reg write mask on hsw/bdw, so have to unmask all
1856 * registers in order to keep the CURSURFLIVE tricks working :(
1857 */
1858 if (IS_DISPLAY_VER(display, 9, 10))
1859 mask |= EDP_PSR_DEBUG_MASK_DISP_REG_WRITE;
1860
1861 /* allow PSR with sprite enabled */
1862 if (IS_HASWELL(dev_priv))
1863 mask |= EDP_PSR_DEBUG_MASK_SPRITE_ENABLE;
1864 }
1865
1866 intel_de_write(display, psr_debug_reg(display, cpu_transcoder), mask);
1867
1868 psr_irq_control(intel_dp);
1869
1870 /*
1871 * TODO: if future platforms supports DC3CO in more than one
1872 * transcoder, EXITLINE will need to be unset when disabling PSR
1873 */
1874 if (intel_dp->psr.dc3co_exitline)
1875 intel_de_rmw(display,
1876 TRANS_EXITLINE(display, cpu_transcoder),
1877 EXITLINE_MASK,
1878 intel_dp->psr.dc3co_exitline << EXITLINE_SHIFT | EXITLINE_ENABLE);
1879
1880 if (HAS_PSR_HW_TRACKING(display) && HAS_PSR2_SEL_FETCH(display))
1881 intel_de_rmw(display, CHICKEN_PAR1_1, IGNORE_PSR2_HW_TRACKING,
1882 intel_dp->psr.psr2_sel_fetch_enabled ?
1883 IGNORE_PSR2_HW_TRACKING : 0);
1884
1885 if (intel_dp_is_edp(intel_dp))
1886 intel_alpm_configure(intel_dp, crtc_state);
1887
1888 /*
1889 * Wa_16013835468
1890 * Wa_14015648006
1891 */
1892 wm_optimization_wa(intel_dp, crtc_state);
1893
1894 if (intel_dp->psr.sel_update_enabled) {
1895 if (DISPLAY_VER(display) == 9)
1896 intel_de_rmw(display, CHICKEN_TRANS(cpu_transcoder), 0,
1897 PSR2_VSC_ENABLE_PROG_HEADER |
1898 PSR2_ADD_VERTICAL_LINE_COUNT);
1899
1900 /*
1901 * Wa_16014451276:adlp,mtl[a0,b0]
1902 * All supported adlp panels have 1-based X granularity, this may
1903 * cause issues if non-supported panels are used.
1904 */
1905 if (!intel_dp->psr.panel_replay_enabled &&
1906 (IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0) ||
1907 IS_ALDERLAKE_P(dev_priv)))
1908 intel_de_rmw(display, hsw_chicken_trans_reg(dev_priv, cpu_transcoder),
1909 0, ADLP_1_BASED_X_GRANULARITY);
1910
1911 /* Wa_16012604467:adlp,mtl[a0,b0] */
1912 if (!intel_dp->psr.panel_replay_enabled &&
1913 IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0))
1914 intel_de_rmw(display,
1915 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
1916 0,
1917 MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS);
1918 else if (IS_ALDERLAKE_P(dev_priv))
1919 intel_de_rmw(display, CLKGATE_DIS_MISC, 0,
1920 CLKGATE_DIS_MISC_DMASC_GATING_DIS);
1921 }
1922 }
1923
1924 static bool psr_interrupt_error_check(struct intel_dp *intel_dp)
1925 {
1926 struct intel_display *display = to_intel_display(intel_dp);
1927 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
1928 u32 val;
1929
1930 if (intel_dp->psr.panel_replay_enabled)
1931 goto no_err;
1932
1933 /*
1934 * If a PSR error happened and the driver is reloaded, the EDP_PSR_IIR
1935 * will still keep the error set even after the reset done in the
1936 * irq_preinstall and irq_uninstall hooks.
1937 * And enabling in this situation causes the screen to freeze the first
1938 * time that the PSR HW tries to activate, so let's keep PSR disabled
1939 * to avoid any rendering problems.
1940 */
1941 val = intel_de_read(display, psr_iir_reg(display, cpu_transcoder));
1942 val &= psr_irq_psr_error_bit_get(intel_dp);
1943 if (val) {
1944 intel_dp->psr.sink_not_reliable = true;
1945 drm_dbg_kms(display->drm,
1946 "PSR interruption error set, not enabling PSR\n");
1947 return false;
1948 }
1949
1950 no_err:
1951 return true;
1952 }
1953
1954 static void intel_psr_enable_locked(struct intel_dp *intel_dp,
1955 const struct intel_crtc_state *crtc_state)
1956 {
1957 struct intel_display *display = to_intel_display(intel_dp);
1958 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
1959 u32 val;
1960
1961 drm_WARN_ON(display->drm, intel_dp->psr.enabled);
1962
1963 intel_dp->psr.sel_update_enabled = crtc_state->has_sel_update;
1964 intel_dp->psr.panel_replay_enabled = crtc_state->has_panel_replay;
1965 intel_dp->psr.busy_frontbuffer_bits = 0;
1966 intel_dp->psr.pipe = to_intel_crtc(crtc_state->uapi.crtc)->pipe;
1967 intel_dp->psr.transcoder = crtc_state->cpu_transcoder;
1968 /* DC5/DC6 requires at least 6 idle frames */
1969 val = usecs_to_jiffies(intel_get_frame_time_us(crtc_state) * 6);
1970 intel_dp->psr.dc3co_exit_delay = val;
1971 intel_dp->psr.dc3co_exitline = crtc_state->dc3co_exitline;
1972 intel_dp->psr.psr2_sel_fetch_enabled = crtc_state->enable_psr2_sel_fetch;
1973 intel_dp->psr.su_region_et_enabled = crtc_state->enable_psr2_su_region_et;
1974 intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
1975 intel_dp->psr.req_psr2_sdp_prior_scanline =
1976 crtc_state->req_psr2_sdp_prior_scanline;
1977
1978 if (!psr_interrupt_error_check(intel_dp))
1979 return;
1980
1981 if (intel_dp->psr.panel_replay_enabled) {
1982 drm_dbg_kms(display->drm, "Enabling Panel Replay\n");
1983 } else {
1984 drm_dbg_kms(display->drm, "Enabling PSR%s\n",
1985 intel_dp->psr.sel_update_enabled ? "2" : "1");
1986
1987 /*
1988 * Panel replay has to be enabled before link training: doing it
1989 * only for PSR here.
1990 */
1991 intel_psr_enable_sink(intel_dp, crtc_state);
1992 }
1993
1994 if (intel_dp_is_edp(intel_dp))
1995 intel_snps_phy_update_psr_power_state(&dig_port->base, true);
1996
1997 intel_psr_enable_source(intel_dp, crtc_state);
1998 intel_dp->psr.enabled = true;
1999 intel_dp->psr.paused = false;
2000
2001 intel_psr_activate(intel_dp);
2002 }
2003
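/*
 * Deactivate whichever of PSR1, PSR2 or Panel Replay is currently active.
 * If PSR is not active, only sanity check that the enable bits are clear.
 */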
2004 static void intel_psr_exit(struct intel_dp *intel_dp)
2005 {
2006 struct intel_display *display = to_intel_display(intel_dp);
2007 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2008 u32 val;
2009
2010 if (!intel_dp->psr.active) {
2011 if (transcoder_has_psr2(display, cpu_transcoder)) {
2012 val = intel_de_read(display,
2013 EDP_PSR2_CTL(display, cpu_transcoder));
2014 drm_WARN_ON(display->drm, val & EDP_PSR2_ENABLE);
2015 }
2016
2017 val = intel_de_read(display,
2018 psr_ctl_reg(display, cpu_transcoder));
2019 drm_WARN_ON(display->drm, val & EDP_PSR_ENABLE);
2020
2021 return;
2022 }
2023
2024 if (intel_dp->psr.panel_replay_enabled) {
2025 intel_de_rmw(display, TRANS_DP2_CTL(intel_dp->psr.transcoder),
2026 TRANS_DP2_PANEL_REPLAY_ENABLE, 0);
2027 } else if (intel_dp->psr.sel_update_enabled) {
2028 tgl_disallow_dc3co_on_psr2_exit(intel_dp);
2029
2030 val = intel_de_rmw(display,
2031 EDP_PSR2_CTL(display, cpu_transcoder),
2032 EDP_PSR2_ENABLE, 0);
2033
2034 drm_WARN_ON(display->drm, !(val & EDP_PSR2_ENABLE));
2035 } else {
2036 val = intel_de_rmw(display,
2037 psr_ctl_reg(display, cpu_transcoder),
2038 EDP_PSR_ENABLE, 0);
2039
2040 drm_WARN_ON(display->drm, !(val & EDP_PSR_ENABLE));
2041 }
2042 intel_dp->psr.active = false;
2043 }
2044
2045 static void intel_psr_wait_exit_locked(struct intel_dp *intel_dp)
2046 {
2047 struct intel_display *display = to_intel_display(intel_dp);
2048 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2049 i915_reg_t psr_status;
2050 u32 psr_status_mask;
2051
2052 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2053 intel_dp->psr.panel_replay_enabled)) {
2054 psr_status = EDP_PSR2_STATUS(display, cpu_transcoder);
2055 psr_status_mask = EDP_PSR2_STATUS_STATE_MASK;
2056 } else {
2057 psr_status = psr_status_reg(display, cpu_transcoder);
2058 psr_status_mask = EDP_PSR_STATUS_STATE_MASK;
2059 }
2060
2061 /* Wait till PSR is idle */
2062 if (intel_de_wait_for_clear(display, psr_status,
2063 psr_status_mask, 2000))
2064 drm_err(display->drm, "Timed out waiting PSR idle state\n");
2065 }
2066
2067 static void intel_psr_disable_locked(struct intel_dp *intel_dp)
2068 {
2069 struct intel_display *display = to_intel_display(intel_dp);
2070 struct drm_i915_private *dev_priv = to_i915(display->drm);
2071 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2072
2073 lockdep_assert_held(&intel_dp->psr.lock);
2074
2075 if (!intel_dp->psr.enabled)
2076 return;
2077
2078 if (intel_dp->psr.panel_replay_enabled)
2079 drm_dbg_kms(display->drm, "Disabling Panel Replay\n");
2080 else
2081 drm_dbg_kms(display->drm, "Disabling PSR%s\n",
2082 intel_dp->psr.sel_update_enabled ? "2" : "1");
2083
2084 intel_psr_exit(intel_dp);
2085 intel_psr_wait_exit_locked(intel_dp);
2086
2087 /*
2088 * Wa_16013835468
2089 * Wa_14015648006
2090 */
2091 if (DISPLAY_VER(display) >= 11)
2092 intel_de_rmw(display, GEN8_CHICKEN_DCPR_1,
2093 wa_16013835468_bit_get(intel_dp), 0);
2094
2095 if (intel_dp->psr.sel_update_enabled) {
2096 /* Wa_16012604467:adlp,mtl[a0,b0] */
2097 if (!intel_dp->psr.panel_replay_enabled &&
2098 IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0))
2099 intel_de_rmw(display,
2100 MTL_CLKGATE_DIS_TRANS(display, cpu_transcoder),
2101 MTL_CLKGATE_DIS_TRANS_DMASC_GATING_DIS, 0);
2102 else if (IS_ALDERLAKE_P(dev_priv))
2103 intel_de_rmw(display, CLKGATE_DIS_MISC,
2104 CLKGATE_DIS_MISC_DMASC_GATING_DIS, 0);
2105 }
2106
2107 if (intel_dp_is_edp(intel_dp))
2108 intel_snps_phy_update_psr_power_state(&dp_to_dig_port(intel_dp)->base, false);
2109
2110 /* Panel Replay on eDP is always using ALPM aux less. */
2111 if (intel_dp->psr.panel_replay_enabled && intel_dp_is_edp(intel_dp)) {
2112 intel_de_rmw(display, ALPM_CTL(display, cpu_transcoder),
2113 ALPM_CTL_ALPM_ENABLE |
2114 ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2115
2116 intel_de_rmw(display,
2117 PORT_ALPM_CTL(display, cpu_transcoder),
2118 PORT_ALPM_CTL_ALPM_AUX_LESS_ENABLE, 0);
2119 }
2120
2121 /* Disable PSR on Sink */
2122 if (!intel_dp->psr.panel_replay_enabled) {
2123 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_EN_CFG, 0);
2124
2125 if (intel_dp->psr.sel_update_enabled)
2126 drm_dp_dpcd_writeb(&intel_dp->aux,
2127 DP_RECEIVER_ALPM_CONFIG, 0);
2128 }
2129
2130 intel_dp->psr.enabled = false;
2131 intel_dp->psr.panel_replay_enabled = false;
2132 intel_dp->psr.sel_update_enabled = false;
2133 intel_dp->psr.psr2_sel_fetch_enabled = false;
2134 intel_dp->psr.su_region_et_enabled = false;
2135 intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
2136 }
2137
2138 /**
2139 * intel_psr_disable - Disable PSR
2140 * @intel_dp: Intel DP
2141 * @old_crtc_state: old CRTC state
2142 *
2143 * This function needs to be called before disabling pipe.
2144 */
2145 void intel_psr_disable(struct intel_dp *intel_dp,
2146 const struct intel_crtc_state *old_crtc_state)
2147 {
2148 struct intel_display *display = to_intel_display(intel_dp);
2149
2150 if (!old_crtc_state->has_psr)
2151 return;
2152
2153 if (drm_WARN_ON(display->drm, !CAN_PSR(intel_dp)))
2154 return;
2155
2156 mutex_lock(&intel_dp->psr.lock);
2157
2158 intel_psr_disable_locked(intel_dp);
2159
2160 mutex_unlock(&intel_dp->psr.lock);
2161 cancel_work_sync(&intel_dp->psr.work);
2162 cancel_delayed_work_sync(&intel_dp->psr.dc3co_work);
2163 }
2164
2165 /**
2166 * intel_psr_pause - Pause PSR
2167 * @intel_dp: Intel DP
2168 *
2169 * This function needs to be called after enabling PSR.
2170 */
2171 void intel_psr_pause(struct intel_dp *intel_dp)
2172 {
2173 struct intel_display *display = to_intel_display(intel_dp);
2174 struct intel_psr *psr = &intel_dp->psr;
2175
2176 if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2177 return;
2178
2179 mutex_lock(&psr->lock);
2180
2181 if (!psr->enabled) {
2182 mutex_unlock(&psr->lock);
2183 return;
2184 }
2185
2186 /* If we ever hit this, we will need to add refcount to pause/resume */
2187 drm_WARN_ON(display->drm, psr->paused);
2188
2189 intel_psr_exit(intel_dp);
2190 intel_psr_wait_exit_locked(intel_dp);
2191 psr->paused = true;
2192
2193 mutex_unlock(&psr->lock);
2194
2195 cancel_work_sync(&psr->work);
2196 cancel_delayed_work_sync(&psr->dc3co_work);
2197 }
2198
2199 /**
2200 * intel_psr_resume - Resume PSR
2201 * @intel_dp: Intel DP
2202 *
2203 * This function needs to be called after pausing PSR.
2204 */
2205 void intel_psr_resume(struct intel_dp *intel_dp)
2206 {
2207 struct intel_psr *psr = &intel_dp->psr;
2208
2209 if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
2210 return;
2211
2212 mutex_lock(&psr->lock);
2213
2214 if (!psr->paused)
2215 goto unlock;
2216
2217 psr->paused = false;
2218 intel_psr_activate(intel_dp);
2219
2220 unlock:
2221 mutex_unlock(&psr->lock);
2222 }
2223
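/*
 * The PSR2 manual tracking (selective fetch) register layout differs between
 * ADL-P/display 14+ and older platforms; these helpers return the
 * per-platform bit for each field.
 */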
2224 static u32 man_trk_ctl_enable_bit_get(struct intel_display *display)
2225 {
2226 struct drm_i915_private *dev_priv = to_i915(display->drm);
2227
2228 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ? 0 :
2229 PSR2_MAN_TRK_CTL_ENABLE;
2230 }
2231
2232 static u32 man_trk_ctl_single_full_frame_bit_get(struct intel_display *display)
2233 {
2234 struct drm_i915_private *dev_priv = to_i915(display->drm);
2235
2236 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2237 ADLP_PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME :
2238 PSR2_MAN_TRK_CTL_SF_SINGLE_FULL_FRAME;
2239 }
2240
2241 static u32 man_trk_ctl_partial_frame_bit_get(struct intel_display *display)
2242 {
2243 struct drm_i915_private *dev_priv = to_i915(display->drm);
2244
2245 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2246 ADLP_PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE :
2247 PSR2_MAN_TRK_CTL_SF_PARTIAL_FRAME_UPDATE;
2248 }
2249
2250 static u32 man_trk_ctl_continuos_full_frame(struct intel_display *display)
2251 {
2252 struct drm_i915_private *dev_priv = to_i915(display->drm);
2253
2254 return IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14 ?
2255 ADLP_PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME :
2256 PSR2_MAN_TRK_CTL_SF_CONTINUOS_FULL_FRAME;
2257 }
2258
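/*
 * When selective fetch is enabled, request a single/continuous full frame via
 * manual tracking; then poke CURSURFLIVE (Display WA #0884) to force the HW
 * tracking logic to exit PSR.
 */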
2259 static void psr_force_hw_tracking_exit(struct intel_dp *intel_dp)
2260 {
2261 struct intel_display *display = to_intel_display(intel_dp);
2262 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2263
2264 if (intel_dp->psr.psr2_sel_fetch_enabled)
2265 intel_de_write(display,
2266 PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2267 man_trk_ctl_enable_bit_get(display) |
2268 man_trk_ctl_partial_frame_bit_get(display) |
2269 man_trk_ctl_single_full_frame_bit_get(display) |
2270 man_trk_ctl_continuos_full_frame(display));
2271
2272 /*
2273 * Display WA #0884: skl+
2274 * This documented WA for bxt can be safely applied
2275 * broadly so we can force HW tracking to exit PSR
2276 * instead of disabling and re-enabling.
2277 * Workaround tells us to write 0 to CUR_SURFLIVE_A,
2278 * but it makes more sense to write to the currently active
2279 * pipe.
2280 *
2281 * This workaround does not exist for platforms with display 10 or
2282 * newer, but testing proved that it works up to display 13; for newer
2283 * platforms further testing will be needed.
2284 */
2285 intel_de_write(display, CURSURFLIVE(display, intel_dp->psr.pipe), 0);
2286 }
2287
2288 void intel_psr2_program_trans_man_trk_ctl(const struct intel_crtc_state *crtc_state)
2289 {
2290 struct intel_display *display = to_intel_display(crtc_state);
2291 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2292 enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2293 struct intel_encoder *encoder;
2294
2295 if (!crtc_state->enable_psr2_sel_fetch)
2296 return;
2297
2298 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2299 crtc_state->uapi.encoder_mask) {
2300 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2301
2302 lockdep_assert_held(&intel_dp->psr.lock);
2303 if (intel_dp->psr.psr2_sel_fetch_cff_enabled)
2304 return;
2305 break;
2306 }
2307
2308 intel_de_write(display, PSR2_MAN_TRK_CTL(display, cpu_transcoder),
2309 crtc_state->psr2_man_track_ctl);
2310
2311 if (!crtc_state->enable_psr2_su_region_et)
2312 return;
2313
2314 intel_de_write(display, PIPE_SRCSZ_ERLY_TPT(crtc->pipe),
2315 crtc_state->pipe_srcsz_early_tpt);
2316 }
2317
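/*
 * Translate the computed SU area (or a full-frame request) into the
 * PSR2_MAN_TRK_CTL value for this platform.
 */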
2318 static void psr2_man_trk_ctl_calc(struct intel_crtc_state *crtc_state,
2319 bool full_update)
2320 {
2321 struct intel_display *display = to_intel_display(crtc_state);
2322 struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2323 struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2324 u32 val = man_trk_ctl_enable_bit_get(display);
2325
2326 /* SF partial frame enable has to be set even on full update */
2327 val |= man_trk_ctl_partial_frame_bit_get(display);
2328
2329 if (full_update) {
2330 val |= man_trk_ctl_single_full_frame_bit_get(display);
2331 val |= man_trk_ctl_continuos_full_frame(display);
2332 goto exit;
2333 }
2334
2335 if (crtc_state->psr2_su_area.y1 == -1)
2336 goto exit;
2337
2338 if (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14) {
2339 val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(crtc_state->psr2_su_area.y1);
2340 val |= ADLP_PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(crtc_state->psr2_su_area.y2 - 1);
2341 } else {
2342 drm_WARN_ON(crtc_state->uapi.crtc->dev,
2343 crtc_state->psr2_su_area.y1 % 4 ||
2344 crtc_state->psr2_su_area.y2 % 4);
2345
2346 val |= PSR2_MAN_TRK_CTL_SU_REGION_START_ADDR(
2347 crtc_state->psr2_su_area.y1 / 4 + 1);
2348 val |= PSR2_MAN_TRK_CTL_SU_REGION_END_ADDR(
2349 crtc_state->psr2_su_area.y2 / 4 + 1);
2350 }
2351 exit:
2352 crtc_state->psr2_man_track_ctl = val;
2353 }
2354
2355 static u32 psr2_pipe_srcsz_early_tpt_calc(struct intel_crtc_state *crtc_state,
2356 bool full_update)
2357 {
2358 int width, height;
2359
2360 if (!crtc_state->enable_psr2_su_region_et || full_update)
2361 return 0;
2362
2363 width = drm_rect_width(&crtc_state->psr2_su_area);
2364 height = drm_rect_height(&crtc_state->psr2_su_area);
2365
2366 return PIPESRC_WIDTH(width - 1) | PIPESRC_HEIGHT(height - 1);
2367 }
2368
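/*
 * Grow the overlapping damage area to also cover @damage_area, after
 * clipping it against the pipe source rectangle.
 */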
2369 static void clip_area_update(struct drm_rect *overlap_damage_area,
2370 struct drm_rect *damage_area,
2371 struct drm_rect *pipe_src)
2372 {
2373 if (!drm_rect_intersect(damage_area, pipe_src))
2374 return;
2375
2376 if (overlap_damage_area->y1 == -1) {
2377 overlap_damage_area->y1 = damage_area->y1;
2378 overlap_damage_area->y2 = damage_area->y2;
2379 return;
2380 }
2381
2382 if (damage_area->y1 < overlap_damage_area->y1)
2383 overlap_damage_area->y1 = damage_area->y1;
2384
2385 if (damage_area->y2 > overlap_damage_area->y2)
2386 overlap_damage_area->y2 = damage_area->y2;
2387 }
2388
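/* Align the SU area vertically to the platform/DSC slice granularity. */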
2389 static void intel_psr2_sel_fetch_pipe_alignment(struct intel_crtc_state *crtc_state)
2390 {
2391 struct intel_display *display = to_intel_display(crtc_state);
2392 struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2393 const struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
2394 u16 y_alignment;
2395
2396 /* ADLP aligns the SU region to vdsc slice height in case dsc is enabled */
2397 if (crtc_state->dsc.compression_enable &&
2398 (IS_ALDERLAKE_P(dev_priv) || DISPLAY_VER(display) >= 14))
2399 y_alignment = vdsc_cfg->slice_height;
2400 else
2401 y_alignment = crtc_state->su_y_granularity;
2402
2403 crtc_state->psr2_su_area.y1 -= crtc_state->psr2_su_area.y1 % y_alignment;
2404 if (crtc_state->psr2_su_area.y2 % y_alignment)
2405 crtc_state->psr2_su_area.y2 = ((crtc_state->psr2_su_area.y2 /
2406 y_alignment) + 1) * y_alignment;
2407 }
2408
2409 /*
2410 * When early transport is in use we need to extend SU area to cover
2411 * cursor fully when cursor is in SU area.
2412 */
2413 static void
2414 intel_psr2_sel_fetch_et_alignment(struct intel_atomic_state *state,
2415 struct intel_crtc *crtc,
2416 bool *cursor_in_su_area)
2417 {
2418 struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2419 struct intel_plane_state *new_plane_state;
2420 struct intel_plane *plane;
2421 int i;
2422
2423 if (!crtc_state->enable_psr2_su_region_et)
2424 return;
2425
2426 for_each_new_intel_plane_in_state(state, plane, new_plane_state, i) {
2427 struct drm_rect inter;
2428
2429 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2430 continue;
2431
2432 if (plane->id != PLANE_CURSOR)
2433 continue;
2434
2435 if (!new_plane_state->uapi.visible)
2436 continue;
2437
2438 inter = crtc_state->psr2_su_area;
2439 if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst))
2440 continue;
2441
2442 clip_area_update(&crtc_state->psr2_su_area, &new_plane_state->uapi.dst,
2443 &crtc_state->pipe_src);
2444 *cursor_in_su_area = true;
2445 }
2446 }
2447
2448 /*
2449 * TODO: Not clear how to handle planes with negative position,
2450 * also planes are not updated if they have a negative X
2451 * position, so for now do a full update in these cases.
2452 *
2453 * Plane scaling and rotation are not supported by selective fetch and both
2454 * properties can change without a modeset, so they need to be checked at
2455 * every atomic commit.
2456 */
2457 static bool psr2_sel_fetch_plane_state_supported(const struct intel_plane_state *plane_state)
2458 {
2459 if (plane_state->uapi.dst.y1 < 0 ||
2460 plane_state->uapi.dst.x1 < 0 ||
2461 plane_state->scaler_id >= 0 ||
2462 plane_state->uapi.rotation != DRM_MODE_ROTATE_0)
2463 return false;
2464
2465 return true;
2466 }
2467
2468 /*
2469 * Check for pipe properties that are not supported by selective fetch.
2470 *
2471 * TODO: pipe scaling causes a modeset but skl_update_scaler_crtc() is executed
2472 * after intel_psr_compute_config(), so for now keeping PSR2 selective fetch
2473 * enabled and going to the full update path.
2474 */
2475 static bool psr2_sel_fetch_pipe_state_supported(const struct intel_crtc_state *crtc_state)
2476 {
2477 if (crtc_state->scaler_state.scaler_id >= 0)
2478 return false;
2479
2480 return true;
2481 }
2482
2483 int intel_psr2_sel_fetch_update(struct intel_atomic_state *state,
2484 struct intel_crtc *crtc)
2485 {
2486 struct intel_display *display = to_intel_display(state);
2487 struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2488 struct intel_crtc_state *crtc_state = intel_atomic_get_new_crtc_state(state, crtc);
2489 struct intel_plane_state *new_plane_state, *old_plane_state;
2490 struct intel_plane *plane;
2491 bool full_update = false, cursor_in_su_area = false;
2492 int i, ret;
2493
2494 if (!crtc_state->enable_psr2_sel_fetch)
2495 return 0;
2496
2497 if (!psr2_sel_fetch_pipe_state_supported(crtc_state)) {
2498 full_update = true;
2499 goto skip_sel_fetch_set_loop;
2500 }
2501
2502 crtc_state->psr2_su_area.x1 = 0;
2503 crtc_state->psr2_su_area.y1 = -1;
2504 crtc_state->psr2_su_area.x2 = drm_rect_width(&crtc_state->pipe_src);
2505 crtc_state->psr2_su_area.y2 = -1;
2506
2507 /*
2508 * Calculate minimal selective fetch area of each plane and calculate
2509 * the pipe damaged area.
2510 * In the next loop the plane selective fetch area will actually be set
2511 * using whole pipe damaged area.
2512 */
2513 for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2514 new_plane_state, i) {
2515 struct drm_rect src, damaged_area = { .x1 = 0, .y1 = -1,
2516 .x2 = INT_MAX };
2517
2518 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc)
2519 continue;
2520
2521 if (!new_plane_state->uapi.visible &&
2522 !old_plane_state->uapi.visible)
2523 continue;
2524
2525 if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2526 full_update = true;
2527 break;
2528 }
2529
2530 /*
2531 * If visibility changed or the plane moved, mark the whole plane area
2532 * as damaged, as it needs a complete redraw in both the old and new
2533 * positions.
2534 */
2535 if (new_plane_state->uapi.visible != old_plane_state->uapi.visible ||
2536 !drm_rect_equals(&new_plane_state->uapi.dst,
2537 &old_plane_state->uapi.dst)) {
2538 if (old_plane_state->uapi.visible) {
2539 damaged_area.y1 = old_plane_state->uapi.dst.y1;
2540 damaged_area.y2 = old_plane_state->uapi.dst.y2;
2541 clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2542 &crtc_state->pipe_src);
2543 }
2544
2545 if (new_plane_state->uapi.visible) {
2546 damaged_area.y1 = new_plane_state->uapi.dst.y1;
2547 damaged_area.y2 = new_plane_state->uapi.dst.y2;
2548 clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2549 &crtc_state->pipe_src);
2550 }
2551 continue;
2552 } else if (new_plane_state->uapi.alpha != old_plane_state->uapi.alpha) {
2553 /* If alpha changed mark the whole plane area as damaged */
2554 damaged_area.y1 = new_plane_state->uapi.dst.y1;
2555 damaged_area.y2 = new_plane_state->uapi.dst.y2;
2556 clip_area_update(&crtc_state->psr2_su_area, &damaged_area,
2557 &crtc_state->pipe_src);
2558 continue;
2559 }
2560
2561 src = drm_plane_state_src(&new_plane_state->uapi);
2562 drm_rect_fp_to_int(&src, &src);
2563
2564 if (!drm_atomic_helper_damage_merged(&old_plane_state->uapi,
2565 &new_plane_state->uapi, &damaged_area))
2566 continue;
2567
2568 damaged_area.y1 += new_plane_state->uapi.dst.y1 - src.y1;
2569 damaged_area.y2 += new_plane_state->uapi.dst.y1 - src.y1;
2570 damaged_area.x1 += new_plane_state->uapi.dst.x1 - src.x1;
2571 damaged_area.x2 += new_plane_state->uapi.dst.x1 - src.x1;
2572
2573 clip_area_update(&crtc_state->psr2_su_area, &damaged_area, &crtc_state->pipe_src);
2574 }
2575
2576 /*
2577 * TODO: For now we are just using full update in case
2578 * selective fetch area calculation fails. To optimize this we
2579 * should identify cases where this happens and fix the area
2580 * calculation for those.
2581 */
2582 if (crtc_state->psr2_su_area.y1 == -1) {
2583 drm_info_once(display->drm,
2584 "Selective fetch area calculation failed in pipe %c\n",
2585 pipe_name(crtc->pipe));
2586 full_update = true;
2587 }
2588
2589 if (full_update)
2590 goto skip_sel_fetch_set_loop;
2591
2592 /* Wa_14014971492 */
2593 if (!crtc_state->has_panel_replay &&
2594 ((IS_DISPLAY_VER_STEP(display, IP_VER(14, 0), STEP_A0, STEP_B0) ||
2595 IS_ALDERLAKE_P(dev_priv) || IS_TIGERLAKE(dev_priv))) &&
2596 crtc_state->splitter.enable)
2597 crtc_state->psr2_su_area.y1 = 0;
2598
2599 ret = drm_atomic_add_affected_planes(&state->base, &crtc->base);
2600 if (ret)
2601 return ret;
2602
2603 /*
2604 * Adjust the SU area to cover the cursor fully as necessary (early
2605 * transport). This needs to be done after
2606 * drm_atomic_add_affected_planes to ensure a visible cursor is added to
2607 * the affected planes even when the cursor itself is not updated.
2608 */
2609 intel_psr2_sel_fetch_et_alignment(state, crtc, &cursor_in_su_area);
2610
2611 intel_psr2_sel_fetch_pipe_alignment(crtc_state);
2612
2613 /*
2614 * Now that we have the pipe damaged area, check if it intersects with
2615 * each plane; if it does, set the plane selective fetch area.
2616 */
2617 for_each_oldnew_intel_plane_in_state(state, plane, old_plane_state,
2618 new_plane_state, i) {
2619 struct drm_rect *sel_fetch_area, inter;
2620 struct intel_plane *linked = new_plane_state->planar_linked_plane;
2621
2622 if (new_plane_state->uapi.crtc != crtc_state->uapi.crtc ||
2623 !new_plane_state->uapi.visible)
2624 continue;
2625
2626 inter = crtc_state->psr2_su_area;
2627 sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2628 if (!drm_rect_intersect(&inter, &new_plane_state->uapi.dst)) {
2629 sel_fetch_area->y1 = -1;
2630 sel_fetch_area->y2 = -1;
2631 /*
2632 * if plane sel fetch was previously enabled ->
2633 * disable it
2634 */
2635 if (drm_rect_height(&old_plane_state->psr2_sel_fetch_area) > 0)
2636 crtc_state->update_planes |= BIT(plane->id);
2637
2638 continue;
2639 }
2640
2641 if (!psr2_sel_fetch_plane_state_supported(new_plane_state)) {
2642 full_update = true;
2643 break;
2644 }
2645
2646 sel_fetch_area = &new_plane_state->psr2_sel_fetch_area;
2647 sel_fetch_area->y1 = inter.y1 - new_plane_state->uapi.dst.y1;
2648 sel_fetch_area->y2 = inter.y2 - new_plane_state->uapi.dst.y1;
2649 crtc_state->update_planes |= BIT(plane->id);
2650
2651 /*
2652 * Sel_fetch_area is calculated for UV plane. Use
2653 * same area for Y plane as well.
2654 */
2655 if (linked) {
2656 struct intel_plane_state *linked_new_plane_state;
2657 struct drm_rect *linked_sel_fetch_area;
2658
2659 linked_new_plane_state = intel_atomic_get_plane_state(state, linked);
2660 if (IS_ERR(linked_new_plane_state))
2661 return PTR_ERR(linked_new_plane_state);
2662
2663 linked_sel_fetch_area = &linked_new_plane_state->psr2_sel_fetch_area;
2664 linked_sel_fetch_area->y1 = sel_fetch_area->y1;
2665 linked_sel_fetch_area->y2 = sel_fetch_area->y2;
2666 crtc_state->update_planes |= BIT(linked->id);
2667 }
2668 }
2669
2670 skip_sel_fetch_set_loop:
2671 psr2_man_trk_ctl_calc(crtc_state, full_update);
2672 crtc_state->pipe_srcsz_early_tpt =
2673 psr2_pipe_srcsz_early_tpt_calc(crtc_state, full_update);
2674 return 0;
2675 }
2676
2677 void intel_psr_pre_plane_update(struct intel_atomic_state *state,
2678 struct intel_crtc *crtc)
2679 {
2680 struct intel_display *display = to_intel_display(state);
2681 struct drm_i915_private *i915 = to_i915(state->base.dev);
2682 const struct intel_crtc_state *old_crtc_state =
2683 intel_atomic_get_old_crtc_state(state, crtc);
2684 const struct intel_crtc_state *new_crtc_state =
2685 intel_atomic_get_new_crtc_state(state, crtc);
2686 struct intel_encoder *encoder;
2687
2688 if (!HAS_PSR(display))
2689 return;
2690
2691 for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2692 old_crtc_state->uapi.encoder_mask) {
2693 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2694 struct intel_psr *psr = &intel_dp->psr;
2695 bool needs_to_disable = false;
2696
2697 mutex_lock(&psr->lock);
2698
2699 /*
2700 * Reasons to disable:
2701 * - PSR disabled in new state
2702 * - All planes will go inactive
2703 * - Changing between PSR versions
2704 * - Region Early Transport changing
2705 * - Display WA #1136: skl, bxt
2706 */
2707 needs_to_disable |= intel_crtc_needs_modeset(new_crtc_state);
2708 needs_to_disable |= !new_crtc_state->has_psr;
2709 needs_to_disable |= !new_crtc_state->active_planes;
2710 needs_to_disable |= new_crtc_state->has_sel_update != psr->sel_update_enabled;
2711 needs_to_disable |= new_crtc_state->enable_psr2_su_region_et !=
2712 psr->su_region_et_enabled;
2713 needs_to_disable |= DISPLAY_VER(i915) < 11 &&
2714 new_crtc_state->wm_level_disabled;
2715
2716 if (psr->enabled && needs_to_disable)
2717 intel_psr_disable_locked(intel_dp);
2718 else if (psr->enabled && new_crtc_state->wm_level_disabled)
2719 /* Wa_14015648006 */
2720 wm_optimization_wa(intel_dp, new_crtc_state);
2721
2722 mutex_unlock(&psr->lock);
2723 }
2724 }
2725
2726 void intel_psr_post_plane_update(struct intel_atomic_state *state,
2727 struct intel_crtc *crtc)
2728 {
2729 struct intel_display *display = to_intel_display(state);
2730 const struct intel_crtc_state *crtc_state =
2731 intel_atomic_get_new_crtc_state(state, crtc);
2732 struct intel_encoder *encoder;
2733
2734 if (!crtc_state->has_psr)
2735 return;
2736
2737 for_each_intel_encoder_mask_with_psr(state->base.dev, encoder,
2738 crtc_state->uapi.encoder_mask) {
2739 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2740 struct intel_psr *psr = &intel_dp->psr;
2741 bool keep_disabled = false;
2742
2743 mutex_lock(&psr->lock);
2744
2745 drm_WARN_ON(display->drm,
2746 psr->enabled && !crtc_state->active_planes);
2747
2748 keep_disabled |= psr->sink_not_reliable;
2749 keep_disabled |= !crtc_state->active_planes;
2750
2751 /* Display WA #1136: skl, bxt */
2752 keep_disabled |= DISPLAY_VER(display) < 11 &&
2753 crtc_state->wm_level_disabled;
2754
2755 if (!psr->enabled && !keep_disabled)
2756 intel_psr_enable_locked(intel_dp, crtc_state);
2757 else if (psr->enabled && !crtc_state->wm_level_disabled)
2758 /* Wa_14015648006 */
2759 wm_optimization_wa(intel_dp, crtc_state);
2760
2761 /* Force a PSR exit when enabling CRC to avoid CRC timeouts */
2762 if (crtc_state->crc_enabled && psr->enabled)
2763 psr_force_hw_tracking_exit(intel_dp);
2764
2765 /*
2766 * Clear possible busy bits in case we have
2767 * invalidate -> flip -> flush sequence.
2768 */
2769 intel_dp->psr.busy_frontbuffer_bits = 0;
2770
2771 mutex_unlock(&psr->lock);
2772 }
2773 }
2774
2775 static int _psr2_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2776 {
2777 struct intel_display *display = to_intel_display(intel_dp);
2778 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2779
2780 /*
2781 * Any state lower than EDP_PSR2_STATUS_STATE_DEEP_SLEEP is enough.
2782 * As all higher states have bit 4 of the PSR2 state set, we can just wait for
2783 * EDP_PSR2_STATUS_STATE_DEEP_SLEEP to be cleared.
2784 */
2785 return intel_de_wait_for_clear(display,
2786 EDP_PSR2_STATUS(display, cpu_transcoder),
2787 EDP_PSR2_STATUS_STATE_DEEP_SLEEP, 50);
2788 }
2789
2790 static int _psr1_ready_for_pipe_update_locked(struct intel_dp *intel_dp)
2791 {
2792 struct intel_display *display = to_intel_display(intel_dp);
2793 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2794
2795 /*
2796 * From bspec: Panel Self Refresh (BDW+)
2797 * Max. time for PSR to idle = Inverse of the refresh rate + 6 ms of
2798 * exit training time + 1.5 ms of aux channel handshake. 50 ms is
2799 * defensive enough to cover everything.
2800 */
2801 return intel_de_wait_for_clear(display,
2802 psr_status_reg(display, cpu_transcoder),
2803 EDP_PSR_STATUS_STATE_MASK, 50);
2804 }
2805
2806 /**
2807 * intel_psr_wait_for_idle_locked - wait for PSR to be ready for a pipe update
2808 * @new_crtc_state: new CRTC state
2809 *
2810 * This function is expected to be called from pipe_update_start() where it is
2811 * not expected to race with PSR enable or disable.
2812 */
2813 void intel_psr_wait_for_idle_locked(const struct intel_crtc_state *new_crtc_state)
2814 {
2815 struct intel_display *display = to_intel_display(new_crtc_state);
2816 struct intel_encoder *encoder;
2817
2818 if (!new_crtc_state->has_psr)
2819 return;
2820
2821 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
2822 new_crtc_state->uapi.encoder_mask) {
2823 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
2824 int ret;
2825
2826 lockdep_assert_held(&intel_dp->psr.lock);
2827
2828 if (!intel_dp->psr.enabled || intel_dp->psr.panel_replay_enabled)
2829 continue;
2830
2831 if (intel_dp->psr.sel_update_enabled)
2832 ret = _psr2_ready_for_pipe_update_locked(intel_dp);
2833 else
2834 ret = _psr1_ready_for_pipe_update_locked(intel_dp);
2835
2836 if (ret)
2837 drm_err(display->drm,
2838 "PSR wait timed out, atomic update may fail\n");
2839 }
2840 }
2841
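/*
 * Wait for the PSR state machine to go idle before re-enabling. The PSR lock
 * is dropped while waiting, so psr.enabled is re-checked afterwards.
 */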
2842 static bool __psr_wait_for_idle_locked(struct intel_dp *intel_dp)
2843 {
2844 struct intel_display *display = to_intel_display(intel_dp);
2845 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
2846 i915_reg_t reg;
2847 u32 mask;
2848 int err;
2849
2850 if (!intel_dp->psr.enabled)
2851 return false;
2852
2853 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
2854 intel_dp->psr.panel_replay_enabled)) {
2855 reg = EDP_PSR2_STATUS(display, cpu_transcoder);
2856 mask = EDP_PSR2_STATUS_STATE_MASK;
2857 } else {
2858 reg = psr_status_reg(display, cpu_transcoder);
2859 mask = EDP_PSR_STATUS_STATE_MASK;
2860 }
2861
2862 mutex_unlock(&intel_dp->psr.lock);
2863
2864 err = intel_de_wait_for_clear(display, reg, mask, 50);
2865 if (err)
2866 drm_err(display->drm,
2867 "Timed out waiting for PSR Idle for re-enable\n");
2868
2869 /* After the unlocked wait, verify that PSR is still wanted! */
2870 mutex_lock(&intel_dp->psr.lock);
2871 return err == 0 && intel_dp->psr.enabled;
2872 }
2873
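/*
 * Force a modeset on all eDP connectors so that a changed PSR debug mode is
 * re-evaluated by a fresh compute_config pass.
 */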
2874 static int intel_psr_fastset_force(struct intel_display *display)
2875 {
2876 struct drm_connector_list_iter conn_iter;
2877 struct drm_modeset_acquire_ctx ctx;
2878 struct drm_atomic_state *state;
2879 struct drm_connector *conn;
2880 int err = 0;
2881
2882 state = drm_atomic_state_alloc(display->drm);
2883 if (!state)
2884 return -ENOMEM;
2885
2886 drm_modeset_acquire_init(&ctx, DRM_MODESET_ACQUIRE_INTERRUPTIBLE);
2887
2888 state->acquire_ctx = &ctx;
2889 to_intel_atomic_state(state)->internal = true;
2890
2891 retry:
2892 drm_connector_list_iter_begin(display->drm, &conn_iter);
2893 drm_for_each_connector_iter(conn, &conn_iter) {
2894 struct drm_connector_state *conn_state;
2895 struct drm_crtc_state *crtc_state;
2896
2897 if (conn->connector_type != DRM_MODE_CONNECTOR_eDP)
2898 continue;
2899
2900 conn_state = drm_atomic_get_connector_state(state, conn);
2901 if (IS_ERR(conn_state)) {
2902 err = PTR_ERR(conn_state);
2903 break;
2904 }
2905
2906 if (!conn_state->crtc)
2907 continue;
2908
2909 crtc_state = drm_atomic_get_crtc_state(state, conn_state->crtc);
2910 if (IS_ERR(crtc_state)) {
2911 err = PTR_ERR(crtc_state);
2912 break;
2913 }
2914
2915 /* Mark mode as changed to trigger a pipe->update() */
2916 crtc_state->mode_changed = true;
2917 }
2918 drm_connector_list_iter_end(&conn_iter);
2919
2920 if (err == 0)
2921 err = drm_atomic_commit(state);
2922
2923 if (err == -EDEADLK) {
2924 drm_atomic_state_clear(state);
2925 err = drm_modeset_backoff(&ctx);
2926 if (!err)
2927 goto retry;
2928 }
2929
2930 drm_modeset_drop_locks(&ctx);
2931 drm_modeset_acquire_fini(&ctx);
2932 drm_atomic_state_put(state);
2933
2934 return err;
2935 }
2936
2937 int intel_psr_debug_set(struct intel_dp *intel_dp, u64 val)
2938 {
2939 struct intel_display *display = to_intel_display(intel_dp);
2940 const u32 mode = val & I915_PSR_DEBUG_MODE_MASK;
2941 const u32 disable_bits = val & (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
2942 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
2943 u32 old_mode, old_disable_bits;
2944 int ret;
2945
2946 if (val & ~(I915_PSR_DEBUG_IRQ | I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
2947 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE |
2948 I915_PSR_DEBUG_MODE_MASK) ||
2949 mode > I915_PSR_DEBUG_ENABLE_SEL_FETCH) {
2950 drm_dbg_kms(display->drm, "Invalid debug mask %llx\n", val);
2951 return -EINVAL;
2952 }
2953
2954 ret = mutex_lock_interruptible(&intel_dp->psr.lock);
2955 if (ret)
2956 return ret;
2957
2958 old_mode = intel_dp->psr.debug & I915_PSR_DEBUG_MODE_MASK;
2959 old_disable_bits = intel_dp->psr.debug &
2960 (I915_PSR_DEBUG_SU_REGION_ET_DISABLE |
2961 I915_PSR_DEBUG_PANEL_REPLAY_DISABLE);
2962
2963 intel_dp->psr.debug = val;
2964
2965 /*
2966 * Do it right away if it's already enabled, otherwise it will be done
2967 * when enabling the source.
2968 */
2969 if (intel_dp->psr.enabled)
2970 psr_irq_control(intel_dp);
2971
2972 mutex_unlock(&intel_dp->psr.lock);
2973
2974 if (old_mode != mode || old_disable_bits != disable_bits)
2975 ret = intel_psr_fastset_force(display);
2976
2977 return ret;
2978 }
2979
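/*
 * An AUX error interrupt means the sink misbehaved: disable PSR, mark the
 * sink as not reliable and wake it back up.
 */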
2980 static void intel_psr_handle_irq(struct intel_dp *intel_dp)
2981 {
2982 struct intel_psr *psr = &intel_dp->psr;
2983
2984 intel_psr_disable_locked(intel_dp);
2985 psr->sink_not_reliable = true;
2986 /* let's make sure that the sink is awake */
2987 drm_dp_dpcd_writeb(&intel_dp->aux, DP_SET_POWER, DP_SET_POWER_D0);
2988 }
2989
2990 static void intel_psr_work(struct work_struct *work)
2991 {
2992 struct intel_dp *intel_dp =
2993 container_of(work, typeof(*intel_dp), psr.work);
2994
2995 mutex_lock(&intel_dp->psr.lock);
2996
2997 if (!intel_dp->psr.enabled)
2998 goto unlock;
2999
3000 if (READ_ONCE(intel_dp->psr.irq_aux_error))
3001 intel_psr_handle_irq(intel_dp);
3002
3003 /*
3004 * We have to make sure PSR is ready for re-enable,
3005 * otherwise it stays disabled until the next full enable/disable cycle.
3006 * PSR might take some time to get fully disabled
3007 * and be ready for re-enable.
3008 */
3009 if (!__psr_wait_for_idle_locked(intel_dp))
3010 goto unlock;
3011
3012 /*
3013 * The delayed work can race with an invalidate hence we need to
3014 * recheck. Since psr_flush first clears this and then reschedules we
3015 * won't ever miss a flush when bailing out here.
3016 */
3017 if (intel_dp->psr.busy_frontbuffer_bits || intel_dp->psr.active)
3018 goto unlock;
3019
3020 intel_psr_activate(intel_dp);
3021 unlock:
3022 mutex_unlock(&intel_dp->psr.lock);
3023 }
3024
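/*
 * On invalidate, switch selective fetch to continuous full frame mode (or
 * exit PSR entirely when HW tracking is in use) until the matching flush.
 */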
3025 static void _psr_invalidate_handle(struct intel_dp *intel_dp)
3026 {
3027 struct intel_display *display = to_intel_display(intel_dp);
3028 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3029
3030 if (intel_dp->psr.psr2_sel_fetch_enabled) {
3031 u32 val;
3032
3033 if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3034 /* Send one update, otherwise lag is observed on screen */
3035 intel_de_write(display,
3036 CURSURFLIVE(display, intel_dp->psr.pipe),
3037 0);
3038 return;
3039 }
3040
3041 val = man_trk_ctl_enable_bit_get(display) |
3042 man_trk_ctl_partial_frame_bit_get(display) |
3043 man_trk_ctl_continuos_full_frame(display);
3044 intel_de_write(display,
3045 PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3046 val);
3047 intel_de_write(display,
3048 CURSURFLIVE(display, intel_dp->psr.pipe), 0);
3049 intel_dp->psr.psr2_sel_fetch_cff_enabled = true;
3050 } else {
3051 intel_psr_exit(intel_dp);
3052 }
3053 }
3054
3055 /**
3056 * intel_psr_invalidate - Invalidate PSR
3057 * @display: display device
3058 * @frontbuffer_bits: frontbuffer plane tracking bits
3059 * @origin: which operation caused the invalidate
3060 *
3061 * Since the hardware frontbuffer tracking has gaps we need to integrate
3062 * with the software frontbuffer tracking. This function gets called every
3063 * time frontbuffer rendering starts and a buffer gets dirtied. PSR must be
3064 * disabled if the frontbuffer mask contains a buffer relevant to PSR.
3065 *
3066 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3067 */
3068 void intel_psr_invalidate(struct intel_display *display,
3069 unsigned frontbuffer_bits, enum fb_op_origin origin)
3070 {
3071 struct intel_encoder *encoder;
3072
3073 if (origin == ORIGIN_FLIP)
3074 return;
3075
3076 for_each_intel_encoder_with_psr(display->drm, encoder) {
3077 unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3078 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3079
3080 mutex_lock(&intel_dp->psr.lock);
3081 if (!intel_dp->psr.enabled) {
3082 mutex_unlock(&intel_dp->psr.lock);
3083 continue;
3084 }
3085
3086 pipe_frontbuffer_bits &=
3087 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3088 intel_dp->psr.busy_frontbuffer_bits |= pipe_frontbuffer_bits;
3089
3090 if (pipe_frontbuffer_bits)
3091 _psr_invalidate_handle(intel_dp);
3092
3093 mutex_unlock(&intel_dp->psr.lock);
3094 }
3095 }
3096 /*
3097 * When we completely rely on PSR2 S/W tracking in the future,
3098 * intel_psr_flush() will also invalidate and flush the PSR for the
3099 * ORIGIN_FLIP event, therefore tgl_dc3co_flush_locked() will need to be
3100 * changed accordingly.
3101 */
3102 static void
3103 tgl_dc3co_flush_locked(struct intel_dp *intel_dp, unsigned int frontbuffer_bits,
3104 enum fb_op_origin origin)
3105 {
3106 struct intel_display *display = to_intel_display(intel_dp);
3107 struct drm_i915_private *i915 = to_i915(display->drm);
3108
3109 if (!intel_dp->psr.dc3co_exitline || !intel_dp->psr.sel_update_enabled ||
3110 !intel_dp->psr.active)
3111 return;
3112
3113 /*
3114 * Every frontbuffer flush/flip event modifies the delay of the delayed
3115 * work; when the delayed work finally runs, it means the display has been idle.
3116 */
3117 if (!(frontbuffer_bits &
3118 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe)))
3119 return;
3120
3121 tgl_psr2_enable_dc3co(intel_dp);
3122 mod_delayed_work(i915->unordered_wq, &intel_dp->psr.dc3co_work,
3123 intel_dp->psr.dc3co_exit_delay);
3124 }
3125
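/*
 * On flush, drop continuous full frame mode again (once no busy frontbuffer
 * bits remain) or force a single-frame HW tracking exit, and schedule the
 * re-activation work if needed.
 */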
3126 static void _psr_flush_handle(struct intel_dp *intel_dp)
3127 {
3128 struct intel_display *display = to_intel_display(intel_dp);
3129 struct drm_i915_private *dev_priv = to_i915(display->drm);
3130 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3131
3132 if (intel_dp->psr.psr2_sel_fetch_enabled) {
3133 if (intel_dp->psr.psr2_sel_fetch_cff_enabled) {
3134 /* can we turn CFF off? */
3135 if (intel_dp->psr.busy_frontbuffer_bits == 0) {
3136 u32 val = man_trk_ctl_enable_bit_get(display) |
3137 man_trk_ctl_partial_frame_bit_get(display) |
3138 man_trk_ctl_single_full_frame_bit_get(display) |
3139 man_trk_ctl_continuos_full_frame(display);
3140
3141 /*
3142 * Set psr2_sel_fetch_cff_enabled to false to allow selective
3143 * updates, but keep the CFF bit enabled since we don't have a proper
3144 * SU configuration in case an update is sent for any reason after
3145 * the SFF bit gets cleared by the HW on the next vblank.
3146 */
3147 intel_de_write(display,
3148 PSR2_MAN_TRK_CTL(display, cpu_transcoder),
3149 val);
3150 intel_de_write(display,
3151 CURSURFLIVE(display, intel_dp->psr.pipe),
3152 0);
3153 intel_dp->psr.psr2_sel_fetch_cff_enabled = false;
3154 }
3155 } else {
3156 /*
3157 * continuous full frame is disabled, only a single full
3158 * frame is required
3159 */
3160 psr_force_hw_tracking_exit(intel_dp);
3161 }
3162 } else {
3163 psr_force_hw_tracking_exit(intel_dp);
3164
3165 if (!intel_dp->psr.active && !intel_dp->psr.busy_frontbuffer_bits)
3166 queue_work(dev_priv->unordered_wq, &intel_dp->psr.work);
3167 }
3168 }
3169
3170 /**
3171 * intel_psr_flush - Flush PSR
3172 * @display: display device
3173 * @frontbuffer_bits: frontbuffer plane tracking bits
3174 * @origin: which operation caused the flush
3175 *
3176 * Since the hardware frontbuffer tracking has gaps we need to integrate
3177 * with the software frontbuffer tracking. This function gets called every
3178 * time frontbuffer rendering has completed and flushed out to memory. PSR
3179 * can be enabled again if no other frontbuffer relevant to PSR is dirty.
3180 *
3181 * Dirty frontbuffers relevant to PSR are tracked in busy_frontbuffer_bits.
3182 */
3183 void intel_psr_flush(struct intel_display *display,
3184 unsigned frontbuffer_bits, enum fb_op_origin origin)
3185 {
3186 struct intel_encoder *encoder;
3187
3188 for_each_intel_encoder_with_psr(display->drm, encoder) {
3189 unsigned int pipe_frontbuffer_bits = frontbuffer_bits;
3190 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3191
3192 mutex_lock(&intel_dp->psr.lock);
3193 if (!intel_dp->psr.enabled) {
3194 mutex_unlock(&intel_dp->psr.lock);
3195 continue;
3196 }
3197
3198 pipe_frontbuffer_bits &=
3199 INTEL_FRONTBUFFER_ALL_MASK(intel_dp->psr.pipe);
3200 intel_dp->psr.busy_frontbuffer_bits &= ~pipe_frontbuffer_bits;
3201
3202 /*
3203 * If the PSR is paused by an explicit intel_psr_paused() call,
3204 * we have to ensure that the PSR is not activated until
3205 * intel_psr_resume() is called.
3206 */
3207 if (intel_dp->psr.paused)
3208 goto unlock;
3209
3210 if (origin == ORIGIN_FLIP ||
3211 (origin == ORIGIN_CURSOR_UPDATE &&
3212 !intel_dp->psr.psr2_sel_fetch_enabled)) {
3213 tgl_dc3co_flush_locked(intel_dp, frontbuffer_bits, origin);
3214 goto unlock;
3215 }
3216
3217 if (pipe_frontbuffer_bits == 0)
3218 goto unlock;
3219
3220 /* By definition flush = invalidate + flush */
3221 _psr_flush_handle(intel_dp);
3222 unlock:
3223 mutex_unlock(&intel_dp->psr.lock);
3224 }
3225 }
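
/*
 * Illustrative sketch only: the matching flush call once CPU rendering has
 * completed and the data has been written out to memory. As with the
 * invalidate sketch above, the helper name and plane choice are assumptions;
 * intel_psr_flush(), INTEL_FRONTBUFFER() and ORIGIN_CPU are real.
 */
#if 0
static void example_frontbuffer_cpu_flush(struct intel_display *display,
					  enum pipe pipe)
{
	unsigned int frontbuffer_bits = INTEL_FRONTBUFFER(pipe, PLANE_PRIMARY);

	/* Lets PSR activate again if nothing relevant to it is still dirty. */
	intel_psr_flush(display, frontbuffer_bits, ORIGIN_CPU);
}
#endif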
3226
3227 /**
3228 * intel_psr_init - Init basic PSR work and mutex.
3229 * @intel_dp: Intel DP
3230 *
3231 * This function is called after connector initialization (which handles
3232 * the connector capabilities) and initializes basic PSR state for each
3233 * DP encoder.
3234 */
3235 void intel_psr_init(struct intel_dp *intel_dp)
3236 {
3237 struct intel_display *display = to_intel_display(intel_dp);
3238 struct drm_i915_private *dev_priv = to_i915(display->drm);
3239 struct intel_connector *connector = intel_dp->attached_connector;
3240 struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
3241
3242 if (!(HAS_PSR(display) || HAS_DP20(dev_priv)))
3243 return;
3244
3245 /*
3246 * HSW spec explicitly says PSR is tied to port A.
3247 * BDW+ platforms have an instance of PSR registers per transcoder, but
3248 * BDW, GEN9 and GEN11 have not been validated by the HW team on any
3249 * transcoder other than the eDP one.
3250 * For now only one instance of PSR is supported on BDW, GEN9 and GEN11,
3251 * so let's keep it hardcoded to PORT_A for those platforms.
3252 * GEN12, however, supports an instance of PSR registers per transcoder.
3253 */
3254 if (DISPLAY_VER(display) < 12 && dig_port->base.port != PORT_A) {
3255 drm_dbg_kms(display->drm,
3256 "PSR condition failed: Port not supported\n");
3257 return;
3258 }
3259
3260 if ((HAS_DP20(dev_priv) && !intel_dp_is_edp(intel_dp)) ||
3261 DISPLAY_VER(display) >= 20)
3262 intel_dp->psr.source_panel_replay_support = true;
3263
3264 if (HAS_PSR(display) && intel_dp_is_edp(intel_dp))
3265 intel_dp->psr.source_support = true;
3266
3267 /* Set link_standby vs. link_off defaults */
3268 if (DISPLAY_VER(display) < 12)
3269 /* For platforms up to TGL, respect the VBT setting again */
3270 intel_dp->psr.link_standby = connector->panel.vbt.psr.full_link;
3271
3272 INIT_WORK(&intel_dp->psr.work, intel_psr_work);
3273 INIT_DELAYED_WORK(&intel_dp->psr.dc3co_work, tgl_dc3co_disable_work);
3274 mutex_init(&intel_dp->psr.lock);
3275 }
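
/*
 * Illustrative sketch only: intel_psr_init() is meant to be called once per
 * DP encoder after its connector has been initialized. The caller shown here
 * is hypothetical; the function itself bails out early on ports and
 * platforms without PSR support, so no extra checks are needed around it.
 */
#if 0
static void example_dp_connector_init_tail(struct intel_dp *intel_dp)
{
	/* Safe to call unconditionally; it no-ops when PSR is unsupported. */
	intel_psr_init(intel_dp);
}
#endif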
3276
3277 static int psr_get_status_and_error_status(struct intel_dp *intel_dp,
3278 u8 *status, u8 *error_status)
3279 {
3280 struct drm_dp_aux *aux = &intel_dp->aux;
3281 int ret;
3282 unsigned int offset;
3283
3284 offset = intel_dp->psr.panel_replay_enabled ?
3285 DP_SINK_DEVICE_PR_AND_FRAME_LOCK_STATUS : DP_PSR_STATUS;
3286
3287 ret = drm_dp_dpcd_readb(aux, offset, status);
3288 if (ret != 1)
3289 return ret;
3290
3291 offset = intel_dp->psr.panel_replay_enabled ?
3292 DP_PANEL_REPLAY_ERROR_STATUS : DP_PSR_ERROR_STATUS;
3293
3294 ret = drm_dp_dpcd_readb(aux, offset, error_status);
3295 if (ret != 1)
3296 return ret;
3297
3298 *status = *status & DP_PSR_SINK_STATE_MASK;
3299
3300 return 0;
3301 }
3302
3303 static void psr_alpm_check(struct intel_dp *intel_dp)
3304 {
3305 struct intel_display *display = to_intel_display(intel_dp);
3306 struct drm_dp_aux *aux = &intel_dp->aux;
3307 struct intel_psr *psr = &intel_dp->psr;
3308 u8 val;
3309 int r;
3310
3311 if (!psr->sel_update_enabled)
3312 return;
3313
3314 r = drm_dp_dpcd_readb(aux, DP_RECEIVER_ALPM_STATUS, &val);
3315 if (r != 1) {
3316 drm_err(display->drm, "Error reading ALPM status\n");
3317 return;
3318 }
3319
3320 if (val & DP_ALPM_LOCK_TIMEOUT_ERROR) {
3321 intel_psr_disable_locked(intel_dp);
3322 psr->sink_not_reliable = true;
3323 drm_dbg_kms(display->drm,
3324 "ALPM lock timeout error, disabling PSR\n");
3325
3326 /* Clearing error */
3327 drm_dp_dpcd_writeb(aux, DP_RECEIVER_ALPM_STATUS, val);
3328 }
3329 }
3330
3331 static void psr_capability_changed_check(struct intel_dp *intel_dp)
3332 {
3333 struct intel_display *display = to_intel_display(intel_dp);
3334 struct intel_psr *psr = &intel_dp->psr;
3335 u8 val;
3336 int r;
3337
3338 r = drm_dp_dpcd_readb(&intel_dp->aux, DP_PSR_ESI, &val);
3339 if (r != 1) {
3340 drm_err(display->drm, "Error reading DP_PSR_ESI\n");
3341 return;
3342 }
3343
3344 if (val & DP_PSR_CAPS_CHANGE) {
3345 intel_psr_disable_locked(intel_dp);
3346 psr->sink_not_reliable = true;
3347 drm_dbg_kms(display->drm,
3348 "Sink PSR capability changed, disabling PSR\n");
3349
3350 /* Clearing it */
3351 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ESI, val);
3352 }
3353 }
3354
3355 /*
3356 * The following error bits are common to PSR and Panel Replay:
3357 * DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR
3358 * DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR == DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR
3359 * DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR
3360 * so this function relies on the PSR definitions; see the sketch after the function.
3361 */
3362 void intel_psr_short_pulse(struct intel_dp *intel_dp)
3363 {
3364 struct intel_display *display = to_intel_display(intel_dp);
3365 struct intel_psr *psr = &intel_dp->psr;
3366 u8 status, error_status;
3367 const u8 errors = DP_PSR_RFB_STORAGE_ERROR |
3368 DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3369 DP_PSR_LINK_CRC_ERROR;
3370
3371 if (!CAN_PSR(intel_dp) && !CAN_PANEL_REPLAY(intel_dp))
3372 return;
3373
3374 mutex_lock(&psr->lock);
3375
3376 if (!psr->enabled)
3377 goto exit;
3378
3379 if (psr_get_status_and_error_status(intel_dp, &status, &error_status)) {
3380 drm_err(display->drm,
3381 "Error reading PSR status or error status\n");
3382 goto exit;
3383 }
3384
3385 if ((!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR) ||
3386 (error_status & errors)) {
3387 intel_psr_disable_locked(intel_dp);
3388 psr->sink_not_reliable = true;
3389 }
3390
3391 if (!psr->panel_replay_enabled && status == DP_PSR_SINK_INTERNAL_ERROR &&
3392 !error_status)
3393 drm_dbg_kms(display->drm,
3394 "PSR sink internal error, disabling PSR\n");
3395 if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3396 drm_dbg_kms(display->drm,
3397 "PSR RFB storage error, disabling PSR\n");
3398 if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3399 drm_dbg_kms(display->drm,
3400 "PSR VSC SDP uncorrectable error, disabling PSR\n");
3401 if (error_status & DP_PSR_LINK_CRC_ERROR)
3402 drm_dbg_kms(display->drm,
3403 "PSR Link CRC error, disabling PSR\n");
3404
3405 if (error_status & ~errors)
3406 drm_err(display->drm,
3407 "PSR_ERROR_STATUS unhandled errors %x\n",
3408 error_status & ~errors);
3409 /* clear status register */
3410 drm_dp_dpcd_writeb(&intel_dp->aux, DP_PSR_ERROR_STATUS, error_status);
3411
3412 if (!psr->panel_replay_enabled) {
3413 psr_alpm_check(intel_dp);
3414 psr_capability_changed_check(intel_dp);
3415 }
3416
3417 exit:
3418 mutex_unlock(&psr->lock);
3419 }
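
/*
 * Illustrative sketch only: the PSR/Panel Replay error bit equivalence that
 * the comment above intel_psr_short_pulse() relies on could be stated as a
 * build-time check. This is not in the driver; it merely restates that
 * assumption.
 */
#if 0
static_assert(DP_PSR_RFB_STORAGE_ERROR == DP_PANEL_REPLAY_RFB_STORAGE_ERROR);
static_assert(DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR ==
	      DP_PANEL_REPLAY_VSC_SDP_UNCORRECTABLE_ERROR);
static_assert(DP_PSR_LINK_CRC_ERROR == DP_PANEL_REPLAY_LINK_CRC_ERROR);
#endif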
3420
3421 bool intel_psr_enabled(struct intel_dp *intel_dp)
3422 {
3423 bool ret;
3424
3425 if (!CAN_PSR(intel_dp))
3426 return false;
3427
3428 mutex_lock(&intel_dp->psr.lock);
3429 ret = intel_dp->psr.enabled;
3430 mutex_unlock(&intel_dp->psr.lock);
3431
3432 return ret;
3433 }
3434
3435 /**
3436 * intel_psr_lock - grab PSR lock
3437 * @crtc_state: the crtc state
3438 *
3439 * This is initially meant to be used around a CRTC update, when
3440 * vblank-sensitive registers are updated and we need to grab the lock
3441 * before that to avoid vblank evasion.
3442 */
3443 void intel_psr_lock(const struct intel_crtc_state *crtc_state)
3444 {
3445 struct intel_display *display = to_intel_display(crtc_state);
3446 struct intel_encoder *encoder;
3447
3448 if (!crtc_state->has_psr)
3449 return;
3450
3451 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3452 crtc_state->uapi.encoder_mask) {
3453 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3454
3455 mutex_lock(&intel_dp->psr.lock);
3456 break;
3457 }
3458 }
3459
3460 /**
3461 * intel_psr_unlock - release PSR lock
3462 * @crtc_state: the crtc state
3463 *
3464 * Release the PSR lock that was held during pipe update.
3465 */
3466 void intel_psr_unlock(const struct intel_crtc_state *crtc_state)
3467 {
3468 struct intel_display *display = to_intel_display(crtc_state);
3469 struct intel_encoder *encoder;
3470
3471 if (!crtc_state->has_psr)
3472 return;
3473
3474 for_each_intel_encoder_mask_with_psr(display->drm, encoder,
3475 crtc_state->uapi.encoder_mask) {
3476 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3477
3478 mutex_unlock(&intel_dp->psr.lock);
3479 break;
3480 }
3481 }
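
/*
 * Illustrative sketch only: the intended pairing of intel_psr_lock() and
 * intel_psr_unlock() around a vblank-sensitive pipe update. The helper name
 * and the register-write placeholder are made up for the example.
 */
#if 0
static void example_pipe_update(const struct intel_crtc_state *crtc_state)
{
	intel_psr_lock(crtc_state);

	/* ... write vblank-sensitive registers under vblank evasion ... */

	intel_psr_unlock(crtc_state);
}
#endif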
3482
3483 static void
3484 psr_source_status(struct intel_dp *intel_dp, struct seq_file *m)
3485 {
3486 struct intel_display *display = to_intel_display(intel_dp);
3487 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3488 const char *status = "unknown";
3489 u32 val, status_val;
3490
3491 if (intel_dp_is_edp(intel_dp) && (intel_dp->psr.sel_update_enabled ||
3492 intel_dp->psr.panel_replay_enabled)) {
3493 static const char * const live_status[] = {
3494 "IDLE",
3495 "CAPTURE",
3496 "CAPTURE_FS",
3497 "SLEEP",
3498 "BUFON_FW",
3499 "ML_UP",
3500 "SU_STANDBY",
3501 "FAST_SLEEP",
3502 "DEEP_SLEEP",
3503 "BUF_ON",
3504 "TG_ON"
3505 };
3506 val = intel_de_read(display,
3507 EDP_PSR2_STATUS(display, cpu_transcoder));
3508 status_val = REG_FIELD_GET(EDP_PSR2_STATUS_STATE_MASK, val);
3509 if (status_val < ARRAY_SIZE(live_status))
3510 status = live_status[status_val];
3511 } else {
3512 static const char * const live_status[] = {
3513 "IDLE",
3514 "SRDONACK",
3515 "SRDENT",
3516 "BUFOFF",
3517 "BUFON",
3518 "AUXACK",
3519 "SRDOFFACK",
3520 "SRDENT_ON",
3521 };
3522 val = intel_de_read(display,
3523 psr_status_reg(display, cpu_transcoder));
3524 status_val = REG_FIELD_GET(EDP_PSR_STATUS_STATE_MASK, val);
3525 if (status_val < ARRAY_SIZE(live_status))
3526 status = live_status[status_val];
3527 }
3528
3529 seq_printf(m, "Source PSR/PanelReplay status: %s [0x%08x]\n", status, val);
3530 }
3531
3532 static void intel_psr_sink_capability(struct intel_dp *intel_dp,
3533 struct seq_file *m)
3534 {
3535 struct intel_psr *psr = &intel_dp->psr;
3536
3537 seq_printf(m, "Sink support: PSR = %s",
3538 str_yes_no(psr->sink_support));
3539
3540 if (psr->sink_support)
3541 seq_printf(m, " [0x%02x]", intel_dp->psr_dpcd[0]);
3542 if (intel_dp->psr_dpcd[0] == DP_PSR2_WITH_Y_COORD_ET_SUPPORTED)
3543 seq_printf(m, " (Early Transport)");
3544 seq_printf(m, ", Panel Replay = %s", str_yes_no(psr->sink_panel_replay_support));
3545 seq_printf(m, ", Panel Replay Selective Update = %s",
3546 str_yes_no(psr->sink_panel_replay_su_support));
3547 if (intel_dp->pr_dpcd & DP_PANEL_REPLAY_EARLY_TRANSPORT_SUPPORT)
3548 seq_printf(m, " (Early Transport)");
3549 seq_printf(m, "\n");
3550 }
3551
3552 static void intel_psr_print_mode(struct intel_dp *intel_dp,
3553 struct seq_file *m)
3554 {
3555 struct intel_psr *psr = &intel_dp->psr;
3556 const char *status, *mode, *region_et;
3557
3558 if (psr->enabled)
3559 status = " enabled";
3560 else
3561 status = "disabled";
3562
3563 if (psr->panel_replay_enabled && psr->sel_update_enabled)
3564 mode = "Panel Replay Selective Update";
3565 else if (psr->panel_replay_enabled)
3566 mode = "Panel Replay";
3567 else if (psr->sel_update_enabled)
3568 mode = "PSR2";
3569 else if (psr->enabled)
3570 mode = "PSR1";
3571 else
3572 mode = "";
3573
3574 if (psr->su_region_et_enabled)
3575 region_et = " (Early Transport)";
3576 else
3577 region_et = "";
3578
3579 seq_printf(m, "PSR mode: %s%s%s\n", mode, status, region_et);
3580 }
3581
3582 static int intel_psr_status(struct seq_file *m, struct intel_dp *intel_dp)
3583 {
3584 struct intel_display *display = to_intel_display(intel_dp);
3585 struct drm_i915_private *dev_priv = to_i915(display->drm);
3586 enum transcoder cpu_transcoder = intel_dp->psr.transcoder;
3587 struct intel_psr *psr = &intel_dp->psr;
3588 intel_wakeref_t wakeref;
3589 bool enabled;
3590 u32 val, psr2_ctl;
3591
3592 intel_psr_sink_capability(intel_dp, m);
3593
3594 if (!(psr->sink_support || psr->sink_panel_replay_support))
3595 return 0;
3596
3597 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3598 mutex_lock(&psr->lock);
3599
3600 intel_psr_print_mode(intel_dp, m);
3601
3602 if (!psr->enabled) {
3603 seq_printf(m, "PSR sink not reliable: %s\n",
3604 str_yes_no(psr->sink_not_reliable));
3605
3606 goto unlock;
3607 }
3608
3609 if (psr->panel_replay_enabled) {
3610 val = intel_de_read(display, TRANS_DP2_CTL(cpu_transcoder));
3611
3612 if (intel_dp_is_edp(intel_dp))
3613 psr2_ctl = intel_de_read(display,
3614 EDP_PSR2_CTL(display,
3615 cpu_transcoder));
3616
3617 enabled = val & TRANS_DP2_PANEL_REPLAY_ENABLE;
3618 } else if (psr->sel_update_enabled) {
3619 val = intel_de_read(display,
3620 EDP_PSR2_CTL(display, cpu_transcoder));
3621 enabled = val & EDP_PSR2_ENABLE;
3622 } else {
3623 val = intel_de_read(display, psr_ctl_reg(display, cpu_transcoder));
3624 enabled = val & EDP_PSR_ENABLE;
3625 }
3626 seq_printf(m, "Source PSR/PanelReplay ctl: %s [0x%08x]\n",
3627 str_enabled_disabled(enabled), val);
3628 if (psr->panel_replay_enabled && intel_dp_is_edp(intel_dp))
3629 seq_printf(m, "PSR2_CTL: 0x%08x\n",
3630 psr2_ctl);
3631 psr_source_status(intel_dp, m);
3632 seq_printf(m, "Busy frontbuffer bits: 0x%08x\n",
3633 psr->busy_frontbuffer_bits);
3634
3635 /*
3636 * SKL+ Perf counter is reset to 0 every time a DC state is entered
3637 */
3638 val = intel_de_read(display, psr_perf_cnt_reg(display, cpu_transcoder));
3639 seq_printf(m, "Performance counter: %u\n",
3640 REG_FIELD_GET(EDP_PSR_PERF_CNT_MASK, val));
3641
3642 if (psr->debug & I915_PSR_DEBUG_IRQ) {
3643 seq_printf(m, "Last attempted entry at: %lld\n",
3644 psr->last_entry_attempt);
3645 seq_printf(m, "Last exit at: %lld\n", psr->last_exit);
3646 }
3647
3648 if (psr->sel_update_enabled) {
3649 u32 su_frames_val[3];
3650 int frame;
3651
3652 /*
3653 * Read all 3 registers beforehand to minimize crossing a
3654 * frame boundary between register reads
3655 */
3656 for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame += 3) {
3657 val = intel_de_read(display,
3658 PSR2_SU_STATUS(display, cpu_transcoder, frame));
3659 su_frames_val[frame / 3] = val;
3660 }
3661
3662 seq_puts(m, "Frame:\tPSR2 SU blocks:\n");
3663
3664 for (frame = 0; frame < PSR2_SU_STATUS_FRAMES; frame++) {
3665 u32 su_blocks;
3666
3667 su_blocks = su_frames_val[frame / 3] &
3668 PSR2_SU_STATUS_MASK(frame);
3669 su_blocks = su_blocks >> PSR2_SU_STATUS_SHIFT(frame);
3670 seq_printf(m, "%d\t%d\n", frame, su_blocks);
3671 }
3672
3673 seq_printf(m, "PSR2 selective fetch: %s\n",
3674 str_enabled_disabled(psr->psr2_sel_fetch_enabled));
3675 }
3676
3677 unlock:
3678 mutex_unlock(&psr->lock);
3679 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3680
3681 return 0;
3682 }
3683
3684 static int i915_edp_psr_status_show(struct seq_file *m, void *data)
3685 {
3686 struct intel_display *display = m->private;
3687 struct intel_dp *intel_dp = NULL;
3688 struct intel_encoder *encoder;
3689
3690 if (!HAS_PSR(display))
3691 return -ENODEV;
3692
3693 /* Find the first eDP which supports PSR */
3694 for_each_intel_encoder_with_psr(display->drm, encoder) {
3695 intel_dp = enc_to_intel_dp(encoder);
3696 break;
3697 }
3698
3699 if (!intel_dp)
3700 return -ENODEV;
3701
3702 return intel_psr_status(m, intel_dp);
3703 }
3704 DEFINE_SHOW_ATTRIBUTE(i915_edp_psr_status);
3705
3706 static int
3707 i915_edp_psr_debug_set(void *data, u64 val)
3708 {
3709 struct intel_display *display = data;
3710 struct drm_i915_private *dev_priv = to_i915(display->drm);
3711 struct intel_encoder *encoder;
3712 intel_wakeref_t wakeref;
3713 int ret = -ENODEV;
3714
3715 if (!HAS_PSR(display))
3716 return ret;
3717
3718 for_each_intel_encoder_with_psr(display->drm, encoder) {
3719 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3720
3721 drm_dbg_kms(display->drm, "Setting PSR debug to %llx\n", val);
3722
3723 wakeref = intel_runtime_pm_get(&dev_priv->runtime_pm);
3724
3725 // TODO: split to each transcoder's PSR debug state
3726 ret = intel_psr_debug_set(intel_dp, val);
3727
3728 intel_runtime_pm_put(&dev_priv->runtime_pm, wakeref);
3729 }
3730
3731 return ret;
3732 }
3733
3734 static int
3735 i915_edp_psr_debug_get(void *data, u64 *val)
3736 {
3737 struct intel_display *display = data;
3738 struct intel_encoder *encoder;
3739
3740 if (!HAS_PSR(display))
3741 return -ENODEV;
3742
3743 for_each_intel_encoder_with_psr(display->drm, encoder) {
3744 struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
3745
3746 // TODO: split to each transcoder's PSR debug state
3747 *val = READ_ONCE(intel_dp->psr.debug);
3748 return 0;
3749 }
3750
3751 return -ENODEV;
3752 }
3753
3754 DEFINE_SIMPLE_ATTRIBUTE(i915_edp_psr_debug_fops,
3755 i915_edp_psr_debug_get, i915_edp_psr_debug_set,
3756 "%llu\n");
3757
3758 void intel_psr_debugfs_register(struct intel_display *display)
3759 {
3760 struct drm_minor *minor = display->drm->primary;
3761
3762 debugfs_create_file("i915_edp_psr_debug", 0644, minor->debugfs_root,
3763 display, &i915_edp_psr_debug_fops);
3764
3765 debugfs_create_file("i915_edp_psr_status", 0444, minor->debugfs_root,
3766 display, &i915_edp_psr_status_fops);
3767 }
3768
3769 static const char *psr_mode_str(struct intel_dp *intel_dp)
3770 {
3771 if (intel_dp->psr.panel_replay_enabled)
3772 return "PANEL-REPLAY";
3773 else if (intel_dp->psr.enabled)
3774 return "PSR";
3775
3776 return "unknown";
3777 }
3778
3779 static int i915_psr_sink_status_show(struct seq_file *m, void *data)
3780 {
3781 struct intel_connector *connector = m->private;
3782 struct intel_dp *intel_dp = intel_attached_dp(connector);
3783 static const char * const sink_status[] = {
3784 "inactive",
3785 "transition to active, capture and display",
3786 "active, display from RFB",
3787 "active, capture and display on sink device timings",
3788 "transition to inactive, capture and display, timing re-sync",
3789 "reserved",
3790 "reserved",
3791 "sink internal error",
3792 };
3793 const char *str;
3794 int ret;
3795 u8 status, error_status;
3796
3797 if (!(CAN_PSR(intel_dp) || CAN_PANEL_REPLAY(intel_dp))) {
3798 seq_puts(m, "PSR/Panel-Replay Unsupported\n");
3799 return -ENODEV;
3800 }
3801
3802 if (connector->base.status != connector_status_connected)
3803 return -ENODEV;
3804
3805 ret = psr_get_status_and_error_status(intel_dp, &status, &error_status);
3806 if (ret)
3807 return ret;
3808
3809 status &= DP_PSR_SINK_STATE_MASK;
3810 if (status < ARRAY_SIZE(sink_status))
3811 str = sink_status[status];
3812 else
3813 str = "unknown";
3814
3815 seq_printf(m, "Sink %s status: 0x%x [%s]\n", psr_mode_str(intel_dp), status, str);
3816
3817 seq_printf(m, "Sink %s error status: 0x%x", psr_mode_str(intel_dp), error_status);
3818
3819 if (error_status & (DP_PSR_RFB_STORAGE_ERROR |
3820 DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR |
3821 DP_PSR_LINK_CRC_ERROR))
3822 seq_puts(m, ":\n");
3823 else
3824 seq_puts(m, "\n");
3825 if (error_status & DP_PSR_RFB_STORAGE_ERROR)
3826 seq_printf(m, "\t%s RFB storage error\n", psr_mode_str(intel_dp));
3827 if (error_status & DP_PSR_VSC_SDP_UNCORRECTABLE_ERROR)
3828 seq_printf(m, "\t%s VSC SDP uncorrectable error\n", psr_mode_str(intel_dp));
3829 if (error_status & DP_PSR_LINK_CRC_ERROR)
3830 seq_printf(m, "\t%s Link CRC error\n", psr_mode_str(intel_dp));
3831
3832 return ret;
3833 }
3834 DEFINE_SHOW_ATTRIBUTE(i915_psr_sink_status);
3835
3836 static int i915_psr_status_show(struct seq_file *m, void *data)
3837 {
3838 struct intel_connector *connector = m->private;
3839 struct intel_dp *intel_dp = intel_attached_dp(connector);
3840
3841 return intel_psr_status(m, intel_dp);
3842 }
3843 DEFINE_SHOW_ATTRIBUTE(i915_psr_status);
3844
3845 void intel_psr_connector_debugfs_add(struct intel_connector *connector)
3846 {
3847 struct intel_display *display = to_intel_display(connector);
3848 struct drm_i915_private *i915 = to_i915(connector->base.dev);
3849 struct dentry *root = connector->base.debugfs_entry;
3850
3851 /* TODO: Add support for MST connectors as well. */
3852 if ((connector->base.connector_type != DRM_MODE_CONNECTOR_eDP &&
3853 connector->base.connector_type != DRM_MODE_CONNECTOR_DisplayPort) ||
3854 connector->mst_port)
3855 return;
3856
3857 debugfs_create_file("i915_psr_sink_status", 0444, root,
3858 connector, &i915_psr_sink_status_fops);
3859
3860 if (HAS_PSR(display) || HAS_DP20(i915))
3861 debugfs_create_file("i915_psr_status", 0444, root,
3862 connector, &i915_psr_status_fops);
3863 }
3864